From 46e5ac9672f2d898d44f7d89349a5faf54a300b8 Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Thu, 15 Jan 2026 18:54:42 +0800
Subject: [PATCH 01/99] =?UTF-8?q?fix(=E7=BD=91=E5=85=B3):=20=E5=AF=B9?=
=?UTF-8?q?=E9=BD=90=20Claude=20OAuth=20=E8=AF=B7=E6=B1=82=E9=80=82?=
=?UTF-8?q?=E9=85=8D?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
backend/internal/pkg/claude/constants.go | 44 +-
backend/internal/service/gateway_service.go | 454 ++++++++++++++++++-
backend/internal/service/identity_service.go | 8 +-
3 files changed, 481 insertions(+), 25 deletions(-)
diff --git a/backend/internal/pkg/claude/constants.go b/backend/internal/pkg/claude/constants.go
index d1a56a84..15144881 100644
--- a/backend/internal/pkg/claude/constants.go
+++ b/backend/internal/pkg/claude/constants.go
@@ -25,15 +25,15 @@ const APIKeyHaikuBetaHeader = BetaInterleavedThinking
// DefaultHeaders 是 Claude Code 客户端默认请求头。
var DefaultHeaders = map[string]string{
- "User-Agent": "claude-cli/2.0.62 (external, cli)",
+ "User-Agent": "claude-cli/2.1.2 (external, cli)",
"X-Stainless-Lang": "js",
- "X-Stainless-Package-Version": "0.52.0",
+ "X-Stainless-Package-Version": "0.70.0",
"X-Stainless-OS": "Linux",
"X-Stainless-Arch": "x64",
"X-Stainless-Runtime": "node",
- "X-Stainless-Runtime-Version": "v22.14.0",
+ "X-Stainless-Runtime-Version": "v24.3.0",
"X-Stainless-Retry-Count": "0",
- "X-Stainless-Timeout": "60",
+ "X-Stainless-Timeout": "600",
"X-App": "cli",
"Anthropic-Dangerous-Direct-Browser-Access": "true",
}
@@ -79,3 +79,39 @@ func DefaultModelIDs() []string {
// DefaultTestModel 测试时使用的默认模型
const DefaultTestModel = "claude-sonnet-4-5-20250929"
+
+// ModelIDOverrides Claude OAuth 请求需要的模型 ID 映射
+var ModelIDOverrides = map[string]string{
+ "claude-sonnet-4-5": "claude-sonnet-4-5-20250929",
+ "claude-opus-4-5": "claude-opus-4-5-20251101",
+ "claude-haiku-4-5": "claude-haiku-4-5-20251001",
+}
+
+// ModelIDReverseOverrides 用于将上游模型 ID 还原为短名
+var ModelIDReverseOverrides = map[string]string{
+ "claude-sonnet-4-5-20250929": "claude-sonnet-4-5",
+ "claude-opus-4-5-20251101": "claude-opus-4-5",
+ "claude-haiku-4-5-20251001": "claude-haiku-4-5",
+}
+
+// NormalizeModelID 根据 Claude OAuth 规则映射模型
+func NormalizeModelID(id string) string {
+ if id == "" {
+ return id
+ }
+ if mapped, ok := ModelIDOverrides[id]; ok {
+ return mapped
+ }
+ return id
+}
+
+// DenormalizeModelID 将上游模型 ID 转换为短名
+func DenormalizeModelID(id string) string {
+ if id == "" {
+ return id
+ }
+ if mapped, ok := ModelIDReverseOverrides[id]; ok {
+ return mapped
+ }
+ return id
+}
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index d5eb0e52..1d29b3fd 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -17,12 +17,14 @@ import (
"strings"
"sync/atomic"
"time"
+ "unicode"
"github.com/Wei-Shaw/sub2api/internal/config"
"github.com/Wei-Shaw/sub2api/internal/pkg/claude"
"github.com/Wei-Shaw/sub2api/internal/pkg/ctxkey"
"github.com/Wei-Shaw/sub2api/internal/util/responseheaders"
"github.com/Wei-Shaw/sub2api/internal/util/urlvalidator"
+ "github.com/google/uuid"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
@@ -44,6 +46,36 @@ var (
sseDataRe = regexp.MustCompile(`^data:\s*`)
sessionIDRegex = regexp.MustCompile(`session_([a-f0-9-]{36})`)
claudeCliUserAgentRe = regexp.MustCompile(`^claude-cli/\d+\.\d+\.\d+`)
+ toolPrefixRe = regexp.MustCompile(`(?i)^(?:oc_|mcp_)`)
+ toolNameBoundaryRe = regexp.MustCompile(`[^a-zA-Z0-9]+`)
+ toolNameCamelRe = regexp.MustCompile(`([a-z0-9])([A-Z])`)
+
+ claudeToolNameOverrides = map[string]string{
+ "bash": "Bash",
+ "read": "Read",
+ "edit": "Edit",
+ "write": "Write",
+ "task": "Task",
+ "glob": "Glob",
+ "grep": "Grep",
+ "webfetch": "WebFetch",
+ "websearch": "WebSearch",
+ "todowrite": "TodoWrite",
+ "question": "AskUserQuestion",
+ }
+ openCodeToolOverrides = map[string]string{
+ "Bash": "bash",
+ "Read": "read",
+ "Edit": "edit",
+ "Write": "write",
+ "Task": "task",
+ "Glob": "glob",
+ "Grep": "grep",
+ "WebFetch": "webfetch",
+ "WebSearch": "websearch",
+ "TodoWrite": "todowrite",
+ "AskUserQuestion": "question",
+ }
// claudeCodePromptPrefixes 用于检测 Claude Code 系统提示词的前缀列表
// 支持多种变体:标准版、Agent SDK 版、Explore Agent 版、Compact 版等
@@ -346,6 +378,268 @@ func (s *GatewayService) replaceModelInBody(body []byte, newModel string) []byte
return newBody
}
+type claudeOAuthNormalizeOptions struct {
+ injectMetadata bool
+ metadataUserID string
+ stripSystemCacheControl bool
+}
+
+func stripToolPrefix(value string) string {
+ if value == "" {
+ return value
+ }
+ return toolPrefixRe.ReplaceAllString(value, "")
+}
+
+func toPascalCase(value string) string {
+ if value == "" {
+ return value
+ }
+ normalized := toolNameBoundaryRe.ReplaceAllString(value, " ")
+ tokens := make([]string, 0)
+ for _, token := range strings.Fields(normalized) {
+ expanded := toolNameCamelRe.ReplaceAllString(token, "$1 $2")
+ parts := strings.Fields(expanded)
+ if len(parts) > 0 {
+ tokens = append(tokens, parts...)
+ }
+ }
+ if len(tokens) == 0 {
+ return value
+ }
+ var builder strings.Builder
+ for _, token := range tokens {
+ lower := strings.ToLower(token)
+ if lower == "" {
+ continue
+ }
+ runes := []rune(lower)
+ runes[0] = unicode.ToUpper(runes[0])
+ builder.WriteString(string(runes))
+ }
+ return builder.String()
+}
+
+func toSnakeCase(value string) string {
+ if value == "" {
+ return value
+ }
+ output := toolNameCamelRe.ReplaceAllString(value, "$1_$2")
+ output = toolNameBoundaryRe.ReplaceAllString(output, "_")
+ output = strings.Trim(output, "_")
+ return strings.ToLower(output)
+}
+
+func normalizeToolNameForClaude(name string, cache map[string]string) string {
+ if name == "" {
+ return name
+ }
+ stripped := stripToolPrefix(name)
+ mapped, ok := claudeToolNameOverrides[strings.ToLower(stripped)]
+ if !ok {
+ mapped = toPascalCase(stripped)
+ }
+ if mapped != "" && cache != nil && mapped != stripped {
+ cache[mapped] = stripped
+ }
+ if mapped == "" {
+ return stripped
+ }
+ return mapped
+}
+
+func normalizeToolNameForOpenCode(name string, cache map[string]string) string {
+ if name == "" {
+ return name
+ }
+ if cache != nil {
+ if mapped, ok := cache[name]; ok {
+ return mapped
+ }
+ }
+ if mapped, ok := openCodeToolOverrides[name]; ok {
+ return mapped
+ }
+ return toSnakeCase(name)
+}
+
+func stripCacheControlFromSystemBlocks(system any) bool {
+ blocks, ok := system.([]any)
+ if !ok {
+ return false
+ }
+ changed := false
+ for _, item := range blocks {
+ block, ok := item.(map[string]any)
+ if !ok {
+ continue
+ }
+ if _, exists := block["cache_control"]; !exists {
+ continue
+ }
+ if text, ok := block["text"].(string); ok && text == claudeCodeSystemPrompt {
+ continue
+ }
+ delete(block, "cache_control")
+ changed = true
+ }
+ return changed
+}
+
+func normalizeClaudeOAuthRequestBody(body []byte, modelID string, opts claudeOAuthNormalizeOptions) ([]byte, string, map[string]string) {
+ if len(body) == 0 {
+ return body, modelID, nil
+ }
+ var req map[string]any
+ if err := json.Unmarshal(body, &req); err != nil {
+ return body, modelID, nil
+ }
+
+ toolNameMap := make(map[string]string)
+
+ if rawModel, ok := req["model"].(string); ok {
+ normalized := claude.NormalizeModelID(rawModel)
+ if normalized != rawModel {
+ req["model"] = normalized
+ modelID = normalized
+ }
+ }
+
+ if rawTools, exists := req["tools"]; exists {
+ switch tools := rawTools.(type) {
+ case []any:
+ for idx, tool := range tools {
+ toolMap, ok := tool.(map[string]any)
+ if !ok {
+ continue
+ }
+ if name, ok := toolMap["name"].(string); ok {
+ normalized := normalizeToolNameForClaude(name, toolNameMap)
+ if normalized != "" && normalized != name {
+ toolMap["name"] = normalized
+ }
+ }
+ tools[idx] = toolMap
+ }
+ req["tools"] = tools
+ case map[string]any:
+ normalizedTools := make(map[string]any, len(tools))
+ for name, value := range tools {
+ normalized := normalizeToolNameForClaude(name, toolNameMap)
+ if normalized == "" {
+ normalized = name
+ }
+ if toolMap, ok := value.(map[string]any); ok {
+ if toolName, ok := toolMap["name"].(string); ok {
+ mappedName := normalizeToolNameForClaude(toolName, toolNameMap)
+ if mappedName != "" && mappedName != toolName {
+ toolMap["name"] = mappedName
+ }
+ } else if normalized != name {
+ toolMap["name"] = normalized
+ }
+ normalizedTools[normalized] = toolMap
+ continue
+ }
+ normalizedTools[normalized] = value
+ }
+ req["tools"] = normalizedTools
+ }
+ } else {
+ req["tools"] = []any{}
+ }
+
+ if messages, ok := req["messages"].([]any); ok {
+ for _, msg := range messages {
+ msgMap, ok := msg.(map[string]any)
+ if !ok {
+ continue
+ }
+ content, ok := msgMap["content"].([]any)
+ if !ok {
+ continue
+ }
+ for _, block := range content {
+ blockMap, ok := block.(map[string]any)
+ if !ok {
+ continue
+ }
+ if blockType, _ := blockMap["type"].(string); blockType != "tool_use" {
+ continue
+ }
+ if name, ok := blockMap["name"].(string); ok {
+ normalized := normalizeToolNameForClaude(name, toolNameMap)
+ if normalized != "" && normalized != name {
+ blockMap["name"] = normalized
+ }
+ }
+ }
+ }
+ }
+
+ if opts.stripSystemCacheControl {
+ if system, ok := req["system"]; ok {
+ _ = stripCacheControlFromSystemBlocks(system)
+ }
+ }
+
+ if opts.injectMetadata && opts.metadataUserID != "" {
+ metadata, ok := req["metadata"].(map[string]any)
+ if !ok {
+ metadata = map[string]any{}
+ req["metadata"] = metadata
+ }
+ if existing, ok := metadata["user_id"].(string); !ok || existing == "" {
+ metadata["user_id"] = opts.metadataUserID
+ }
+ }
+
+ if _, ok := req["temperature"]; ok {
+ delete(req, "temperature")
+ }
+ if _, ok := req["tool_choice"]; ok {
+ delete(req, "tool_choice")
+ }
+
+ newBody, err := json.Marshal(req)
+ if err != nil {
+ return body, modelID, toolNameMap
+ }
+ return newBody, modelID, toolNameMap
+}
+
+func (s *GatewayService) buildOAuthMetadataUserID(parsed *ParsedRequest, account *Account, fp *Fingerprint) string {
+ if parsed == nil || fp == nil || fp.ClientID == "" {
+ return ""
+ }
+ if parsed.MetadataUserID != "" {
+ return ""
+ }
+ accountUUID := account.GetExtraString("account_uuid")
+ if accountUUID == "" {
+ return ""
+ }
+ sessionHash := s.GenerateSessionHash(parsed)
+ sessionID := uuid.NewString()
+ if sessionHash != "" {
+ seed := fmt.Sprintf("%d::%s", account.ID, sessionHash)
+ sessionID = generateSessionUUID(seed)
+ }
+ return fmt.Sprintf("user_%s_account_%s_session_%s", fp.ClientID, accountUUID, sessionID)
+}
+
+func generateSessionUUID(seed string) string {
+ if seed == "" {
+ return uuid.NewString()
+ }
+ hash := sha256.Sum256([]byte(seed))
+ bytes := hash[:16]
+ bytes[6] = (bytes[6] & 0x0f) | 0x40
+ bytes[8] = (bytes[8] & 0x3f) | 0x80
+ return fmt.Sprintf("%x-%x-%x-%x-%x",
+ bytes[0:4], bytes[4:6], bytes[6:8], bytes[8:10], bytes[10:16])
+}
+
// SelectAccount 选择账号(粘性会话+优先级)
func (s *GatewayService) SelectAccount(ctx context.Context, groupID *int64, sessionHash string) (*Account, error) {
return s.SelectAccountForModel(ctx, groupID, sessionHash, "")
@@ -1423,21 +1717,36 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
body := parsed.Body
reqModel := parsed.Model
reqStream := parsed.Stream
+ originalModel := reqModel
+ var toolNameMap map[string]string
- // 智能注入 Claude Code 系统提示词(仅 OAuth/SetupToken 账号需要)
- // 条件:1) OAuth/SetupToken 账号 2) 不是 Claude Code 客户端 3) 不是 Haiku 模型 4) system 中还没有 Claude Code 提示词
- if account.IsOAuth() &&
- !isClaudeCodeClient(c.GetHeader("User-Agent"), parsed.MetadataUserID) &&
- !strings.Contains(strings.ToLower(reqModel), "haiku") &&
- !systemIncludesClaudeCodePrompt(parsed.System) {
- body = injectClaudeCodePrompt(body, parsed.System)
+ if account.IsOAuth() {
+ // 智能注入 Claude Code 系统提示词(仅 OAuth/SetupToken 账号需要)
+ // 条件:1) OAuth/SetupToken 账号 2) 不是 Claude Code 客户端 3) 不是 Haiku 模型 4) system 中还没有 Claude Code 提示词
+ if !isClaudeCodeClient(c.GetHeader("User-Agent"), parsed.MetadataUserID) &&
+ !strings.Contains(strings.ToLower(reqModel), "haiku") &&
+ !systemIncludesClaudeCodePrompt(parsed.System) {
+ body = injectClaudeCodePrompt(body, parsed.System)
+ }
+
+ normalizeOpts := claudeOAuthNormalizeOptions{stripSystemCacheControl: true}
+ if s.identityService != nil {
+ fp, err := s.identityService.GetOrCreateFingerprint(ctx, account.ID, c.Request.Header)
+ if err == nil && fp != nil {
+ if metadataUserID := s.buildOAuthMetadataUserID(parsed, account, fp); metadataUserID != "" {
+ normalizeOpts.injectMetadata = true
+ normalizeOpts.metadataUserID = metadataUserID
+ }
+ }
+ }
+
+ body, reqModel, toolNameMap = normalizeClaudeOAuthRequestBody(body, reqModel, normalizeOpts)
}
// 强制执行 cache_control 块数量限制(最多 4 个)
body = enforceCacheControlLimit(body)
// 应用模型映射(仅对apikey类型账号)
- originalModel := reqModel
if account.Type == AccountTypeAPIKey {
mappedModel := account.GetMappedModel(reqModel)
if mappedModel != reqModel {
@@ -1465,7 +1774,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
retryStart := time.Now()
for attempt := 1; attempt <= maxRetryAttempts; attempt++ {
// 构建上游请求(每次重试需要重新构建,因为请求体需要重新读取)
- upstreamReq, err := s.buildUpstreamRequest(ctx, c, account, body, token, tokenType, reqModel)
+ upstreamReq, err := s.buildUpstreamRequest(ctx, c, account, body, token, tokenType, reqModel, reqStream)
if err != nil {
return nil, err
}
@@ -1541,7 +1850,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
// also downgrade tool_use/tool_result blocks to text.
filteredBody := FilterThinkingBlocksForRetry(body)
- retryReq, buildErr := s.buildUpstreamRequest(ctx, c, account, filteredBody, token, tokenType, reqModel)
+ retryReq, buildErr := s.buildUpstreamRequest(ctx, c, account, filteredBody, token, tokenType, reqModel, reqStream)
if buildErr == nil {
retryResp, retryErr := s.httpUpstream.Do(retryReq, proxyURL, account.ID, account.Concurrency)
if retryErr == nil {
@@ -1572,7 +1881,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
if looksLikeToolSignatureError(msg2) && time.Since(retryStart) < maxRetryElapsed {
log.Printf("Account %d: signature retry still failing and looks tool-related, retrying with tool blocks downgraded", account.ID)
filteredBody2 := FilterSignatureSensitiveBlocksForRetry(body)
- retryReq2, buildErr2 := s.buildUpstreamRequest(ctx, c, account, filteredBody2, token, tokenType, reqModel)
+ retryReq2, buildErr2 := s.buildUpstreamRequest(ctx, c, account, filteredBody2, token, tokenType, reqModel, reqStream)
if buildErr2 == nil {
retryResp2, retryErr2 := s.httpUpstream.Do(retryReq2, proxyURL, account.ID, account.Concurrency)
if retryErr2 == nil {
@@ -1785,7 +2094,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
var firstTokenMs *int
var clientDisconnect bool
if reqStream {
- streamResult, err := s.handleStreamingResponse(ctx, resp, c, account, startTime, originalModel, reqModel)
+ streamResult, err := s.handleStreamingResponse(ctx, resp, c, account, startTime, originalModel, reqModel, toolNameMap)
if err != nil {
if err.Error() == "have error in stream" {
return nil, &UpstreamFailoverError{
@@ -1798,7 +2107,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
firstTokenMs = streamResult.firstTokenMs
clientDisconnect = streamResult.clientDisconnect
} else {
- usage, err = s.handleNonStreamingResponse(ctx, resp, c, account, originalModel, reqModel)
+ usage, err = s.handleNonStreamingResponse(ctx, resp, c, account, originalModel, reqModel, toolNameMap)
if err != nil {
return nil, err
}
@@ -1815,7 +2124,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
}, nil
}
-func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Context, account *Account, body []byte, token, tokenType, modelID string) (*http.Request, error) {
+func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Context, account *Account, body []byte, token, tokenType, modelID string, reqStream bool) (*http.Request, error) {
// 确定目标URL
targetURL := claudeAPIURL
if account.Type == AccountTypeAPIKey {
@@ -1884,6 +2193,9 @@ func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Contex
if req.Header.Get("anthropic-version") == "" {
req.Header.Set("anthropic-version", "2023-06-01")
}
+ if tokenType == "oauth" {
+ applyClaudeOAuthHeaderDefaults(req, reqStream)
+ }
// 处理anthropic-beta header(OAuth账号需要特殊处理)
if tokenType == "oauth" {
@@ -1966,6 +2278,26 @@ func defaultAPIKeyBetaHeader(body []byte) string {
return claude.APIKeyBetaHeader
}
+func applyClaudeOAuthHeaderDefaults(req *http.Request, isStream bool) {
+ if req == nil {
+ return
+ }
+ if req.Header.Get("accept") == "" {
+ req.Header.Set("accept", "application/json")
+ }
+ for key, value := range claude.DefaultHeaders {
+ if value == "" {
+ continue
+ }
+ if req.Header.Get(key) == "" {
+ req.Header.Set(key, value)
+ }
+ }
+ if isStream && req.Header.Get("x-stainless-helper-method") == "" {
+ req.Header.Set("x-stainless-helper-method", "stream")
+ }
+}
+
func truncateForLog(b []byte, maxBytes int) string {
if maxBytes <= 0 {
maxBytes = 2048
@@ -2246,7 +2578,7 @@ type streamingResult struct {
clientDisconnect bool // 客户端是否在流式传输过程中断开
}
-func (s *GatewayService) handleStreamingResponse(ctx context.Context, resp *http.Response, c *gin.Context, account *Account, startTime time.Time, originalModel, mappedModel string) (*streamingResult, error) {
+func (s *GatewayService) handleStreamingResponse(ctx context.Context, resp *http.Response, c *gin.Context, account *Account, startTime time.Time, originalModel, mappedModel string, toolNameMap map[string]string) (*streamingResult, error) {
// 更新5h窗口状态
s.rateLimitService.UpdateSessionWindow(ctx, account, resp.Header)
@@ -2339,6 +2671,7 @@ func (s *GatewayService) handleStreamingResponse(ctx context.Context, resp *http
}
needModelReplace := originalModel != mappedModel
+ rewriteTools := account.IsOAuth()
clientDisconnected := false // 客户端断开标志,断开后继续读取上游以获取完整usage
for {
@@ -2380,11 +2713,14 @@ func (s *GatewayService) handleStreamingResponse(ctx context.Context, resp *http
// Extract data from SSE line (supports both "data: " and "data:" formats)
var data string
if sseDataRe.MatchString(line) {
- data = sseDataRe.ReplaceAllString(line, "")
// 如果有模型映射,替换响应中的model字段
if needModelReplace {
line = s.replaceModelInSSELine(line, mappedModel, originalModel)
}
+ if rewriteTools {
+ line = s.replaceToolNamesInSSELine(line, toolNameMap)
+ }
+ data = sseDataRe.ReplaceAllString(line, "")
}
// 写入客户端(统一处理 data 行和非 data 行)
@@ -2467,6 +2803,61 @@ func (s *GatewayService) replaceModelInSSELine(line, fromModel, toModel string)
return "data: " + string(newData)
}
+func rewriteToolNamesInValue(value any, toolNameMap map[string]string) bool {
+ switch v := value.(type) {
+ case map[string]any:
+ changed := false
+ if blockType, _ := v["type"].(string); blockType == "tool_use" {
+ if name, ok := v["name"].(string); ok {
+ mapped := normalizeToolNameForOpenCode(name, toolNameMap)
+ if mapped != name {
+ v["name"] = mapped
+ changed = true
+ }
+ }
+ }
+ for _, item := range v {
+ if rewriteToolNamesInValue(item, toolNameMap) {
+ changed = true
+ }
+ }
+ return changed
+ case []any:
+ changed := false
+ for _, item := range v {
+ if rewriteToolNamesInValue(item, toolNameMap) {
+ changed = true
+ }
+ }
+ return changed
+ default:
+ return false
+ }
+}
+
+func (s *GatewayService) replaceToolNamesInSSELine(line string, toolNameMap map[string]string) string {
+ if !sseDataRe.MatchString(line) {
+ return line
+ }
+ data := sseDataRe.ReplaceAllString(line, "")
+ if data == "" || data == "[DONE]" {
+ return line
+ }
+
+ var event map[string]any
+ if err := json.Unmarshal([]byte(data), &event); err != nil {
+ return line
+ }
+ if !rewriteToolNamesInValue(event, toolNameMap) {
+ return line
+ }
+ newData, err := json.Marshal(event)
+ if err != nil {
+ return line
+ }
+ return "data: " + string(newData)
+}
+
func (s *GatewayService) parseSSEUsage(data string, usage *ClaudeUsage) {
// 解析message_start获取input tokens(标准Claude API格式)
var msgStart struct {
@@ -2508,7 +2899,7 @@ func (s *GatewayService) parseSSEUsage(data string, usage *ClaudeUsage) {
}
}
-func (s *GatewayService) handleNonStreamingResponse(ctx context.Context, resp *http.Response, c *gin.Context, account *Account, originalModel, mappedModel string) (*ClaudeUsage, error) {
+func (s *GatewayService) handleNonStreamingResponse(ctx context.Context, resp *http.Response, c *gin.Context, account *Account, originalModel, mappedModel string, toolNameMap map[string]string) (*ClaudeUsage, error) {
// 更新5h窗口状态
s.rateLimitService.UpdateSessionWindow(ctx, account, resp.Header)
@@ -2529,6 +2920,9 @@ func (s *GatewayService) handleNonStreamingResponse(ctx context.Context, resp *h
if originalModel != mappedModel {
body = s.replaceModelInResponseBody(body, mappedModel, originalModel)
}
+ if account.IsOAuth() {
+ body = s.replaceToolNamesInResponseBody(body, toolNameMap)
+ }
responseheaders.WriteFilteredHeaders(c.Writer.Header(), resp.Header, s.cfg.Security.ResponseHeaders)
@@ -2566,6 +2960,24 @@ func (s *GatewayService) replaceModelInResponseBody(body []byte, fromModel, toMo
return newBody
}
+func (s *GatewayService) replaceToolNamesInResponseBody(body []byte, toolNameMap map[string]string) []byte {
+ if len(body) == 0 {
+ return body
+ }
+ var resp map[string]any
+ if err := json.Unmarshal(body, &resp); err != nil {
+ return body
+ }
+ if !rewriteToolNamesInValue(resp, toolNameMap) {
+ return body
+ }
+ newBody, err := json.Marshal(resp)
+ if err != nil {
+ return body
+ }
+ return newBody
+}
+
// RecordUsageInput 记录使用量的输入参数
type RecordUsageInput struct {
Result *ForwardResult
@@ -2729,6 +3141,11 @@ func (s *GatewayService) ForwardCountTokens(ctx context.Context, c *gin.Context,
body := parsed.Body
reqModel := parsed.Model
+ if account.IsOAuth() {
+ normalizeOpts := claudeOAuthNormalizeOptions{stripSystemCacheControl: true}
+ body, reqModel, _ = normalizeClaudeOAuthRequestBody(body, reqModel, normalizeOpts)
+ }
+
// Antigravity 账户不支持 count_tokens 转发,直接返回空值
if account.Platform == PlatformAntigravity {
c.JSON(http.StatusOK, gin.H{"input_tokens": 0})
@@ -2917,6 +3334,9 @@ func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Con
if req.Header.Get("anthropic-version") == "" {
req.Header.Set("anthropic-version", "2023-06-01")
}
+ if tokenType == "oauth" {
+ applyClaudeOAuthHeaderDefaults(req, false)
+ }
// OAuth 账号:处理 anthropic-beta header
if tokenType == "oauth" {
diff --git a/backend/internal/service/identity_service.go b/backend/internal/service/identity_service.go
index 1ffa8057..4ab1ab96 100644
--- a/backend/internal/service/identity_service.go
+++ b/backend/internal/service/identity_service.go
@@ -24,13 +24,13 @@ var (
// 默认指纹值(当客户端未提供时使用)
var defaultFingerprint = Fingerprint{
- UserAgent: "claude-cli/2.0.62 (external, cli)",
+ UserAgent: "claude-cli/2.1.2 (external, cli)",
StainlessLang: "js",
- StainlessPackageVersion: "0.52.0",
+ StainlessPackageVersion: "0.70.0",
StainlessOS: "Linux",
StainlessArch: "x64",
StainlessRuntime: "node",
- StainlessRuntimeVersion: "v22.14.0",
+ StainlessRuntimeVersion: "v24.3.0",
}
// Fingerprint represents account fingerprint data
@@ -230,7 +230,7 @@ func generateUUIDFromSeed(seed string) string {
}
// parseUserAgentVersion 解析user-agent版本号
-// 例如:claude-cli/2.0.62 -> (2, 0, 62)
+// 例如:claude-cli/2.1.2 -> (2, 1, 2)
func parseUserAgentVersion(ua string) (major, minor, patch int, ok bool) {
// 匹配 xxx/x.y.z 格式
matches := userAgentVersionRegex.FindStringSubmatch(ua)
From c579439c1ea42636ed7e7447e133a98bedfa7091 Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Thu, 15 Jan 2026 19:17:07 +0800
Subject: [PATCH 02/99] =?UTF-8?q?fix(=E7=BD=91=E5=85=B3):=20=E5=8C=BA?=
=?UTF-8?q?=E5=88=86=20Claude=20Code=20OAuth=20=E9=80=82=E9=85=8D?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
backend/internal/handler/gateway_handler.go | 3 +
backend/internal/pkg/claude/constants.go | 4 +
backend/internal/service/gateway_service.go | 110 +++++++++++++++-----
3 files changed, 90 insertions(+), 27 deletions(-)
diff --git a/backend/internal/handler/gateway_handler.go b/backend/internal/handler/gateway_handler.go
index b60618a8..91d590bf 100644
--- a/backend/internal/handler/gateway_handler.go
+++ b/backend/internal/handler/gateway_handler.go
@@ -707,6 +707,9 @@ func (h *GatewayHandler) CountTokens(c *gin.Context) {
return
}
+ // 检查是否为 Claude Code 客户端,设置到 context 中
+ SetClaudeCodeClientContext(c, body)
+
setOpsRequestContext(c, "", false, body)
parsedReq, err := service.ParseGatewayRequest(body)
diff --git a/backend/internal/pkg/claude/constants.go b/backend/internal/pkg/claude/constants.go
index 15144881..f60412c2 100644
--- a/backend/internal/pkg/claude/constants.go
+++ b/backend/internal/pkg/claude/constants.go
@@ -9,11 +9,15 @@ const (
BetaClaudeCode = "claude-code-20250219"
BetaInterleavedThinking = "interleaved-thinking-2025-05-14"
BetaFineGrainedToolStreaming = "fine-grained-tool-streaming-2025-05-14"
+ BetaTokenCounting = "token-counting-2024-11-01"
)
// DefaultBetaHeader Claude Code 客户端默认的 anthropic-beta header
const DefaultBetaHeader = BetaClaudeCode + "," + BetaOAuth + "," + BetaInterleavedThinking + "," + BetaFineGrainedToolStreaming
+// CountTokensBetaHeader count_tokens 请求使用的 anthropic-beta header
+const CountTokensBetaHeader = BetaClaudeCode + "," + BetaOAuth + "," + BetaInterleavedThinking + "," + BetaTokenCounting
+
// HaikuBetaHeader Haiku 模型使用的 anthropic-beta header(不需要 claude-code beta)
const HaikuBetaHeader = BetaOAuth + "," + BetaInterleavedThinking
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index 1d29b3fd..904b5acd 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -49,6 +49,8 @@ var (
toolPrefixRe = regexp.MustCompile(`(?i)^(?:oc_|mcp_)`)
toolNameBoundaryRe = regexp.MustCompile(`[^a-zA-Z0-9]+`)
toolNameCamelRe = regexp.MustCompile(`([a-z0-9])([A-Z])`)
+ toolNameFieldRe = regexp.MustCompile(`"name"\s*:\s*"([^"]+)"`)
+ modelFieldRe = regexp.MustCompile(`"model"\s*:\s*"([^"]+)"`)
claudeToolNameOverrides = map[string]string{
"bash": "Bash",
@@ -1458,6 +1460,16 @@ func isClaudeCodeClient(userAgent string, metadataUserID string) bool {
return claudeCliUserAgentRe.MatchString(userAgent)
}
+func isClaudeCodeRequest(ctx context.Context, c *gin.Context, parsed *ParsedRequest) bool {
+ if IsClaudeCodeClient(ctx) {
+ return true
+ }
+ if parsed == nil || c == nil {
+ return false
+ }
+ return isClaudeCodeClient(c.GetHeader("User-Agent"), parsed.MetadataUserID)
+}
+
// systemIncludesClaudeCodePrompt 检查 system 中是否已包含 Claude Code 提示词
// 使用前缀匹配支持多种变体(标准版、Agent SDK 版等)
func systemIncludesClaudeCodePrompt(system any) bool {
@@ -1720,11 +1732,13 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
originalModel := reqModel
var toolNameMap map[string]string
- if account.IsOAuth() {
+ isClaudeCode := isClaudeCodeRequest(ctx, c, parsed)
+ shouldMimicClaudeCode := account.IsOAuth() && !isClaudeCode
+
+ if shouldMimicClaudeCode {
// 智能注入 Claude Code 系统提示词(仅 OAuth/SetupToken 账号需要)
// 条件:1) OAuth/SetupToken 账号 2) 不是 Claude Code 客户端 3) 不是 Haiku 模型 4) system 中还没有 Claude Code 提示词
- if !isClaudeCodeClient(c.GetHeader("User-Agent"), parsed.MetadataUserID) &&
- !strings.Contains(strings.ToLower(reqModel), "haiku") &&
+ if !strings.Contains(strings.ToLower(reqModel), "haiku") &&
!systemIncludesClaudeCodePrompt(parsed.System) {
body = injectClaudeCodePrompt(body, parsed.System)
}
@@ -1774,7 +1788,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
retryStart := time.Now()
for attempt := 1; attempt <= maxRetryAttempts; attempt++ {
// 构建上游请求(每次重试需要重新构建,因为请求体需要重新读取)
- upstreamReq, err := s.buildUpstreamRequest(ctx, c, account, body, token, tokenType, reqModel, reqStream)
+ upstreamReq, err := s.buildUpstreamRequest(ctx, c, account, body, token, tokenType, reqModel, reqStream, shouldMimicClaudeCode)
if err != nil {
return nil, err
}
@@ -1850,7 +1864,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
// also downgrade tool_use/tool_result blocks to text.
filteredBody := FilterThinkingBlocksForRetry(body)
- retryReq, buildErr := s.buildUpstreamRequest(ctx, c, account, filteredBody, token, tokenType, reqModel, reqStream)
+ retryReq, buildErr := s.buildUpstreamRequest(ctx, c, account, filteredBody, token, tokenType, reqModel, reqStream, shouldMimicClaudeCode)
if buildErr == nil {
retryResp, retryErr := s.httpUpstream.Do(retryReq, proxyURL, account.ID, account.Concurrency)
if retryErr == nil {
@@ -1881,7 +1895,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
if looksLikeToolSignatureError(msg2) && time.Since(retryStart) < maxRetryElapsed {
log.Printf("Account %d: signature retry still failing and looks tool-related, retrying with tool blocks downgraded", account.ID)
filteredBody2 := FilterSignatureSensitiveBlocksForRetry(body)
- retryReq2, buildErr2 := s.buildUpstreamRequest(ctx, c, account, filteredBody2, token, tokenType, reqModel, reqStream)
+ retryReq2, buildErr2 := s.buildUpstreamRequest(ctx, c, account, filteredBody2, token, tokenType, reqModel, reqStream, shouldMimicClaudeCode)
if buildErr2 == nil {
retryResp2, retryErr2 := s.httpUpstream.Do(retryReq2, proxyURL, account.ID, account.Concurrency)
if retryErr2 == nil {
@@ -2094,7 +2108,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
var firstTokenMs *int
var clientDisconnect bool
if reqStream {
- streamResult, err := s.handleStreamingResponse(ctx, resp, c, account, startTime, originalModel, reqModel, toolNameMap)
+ streamResult, err := s.handleStreamingResponse(ctx, resp, c, account, startTime, originalModel, reqModel, toolNameMap, shouldMimicClaudeCode)
if err != nil {
if err.Error() == "have error in stream" {
return nil, &UpstreamFailoverError{
@@ -2107,7 +2121,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
firstTokenMs = streamResult.firstTokenMs
clientDisconnect = streamResult.clientDisconnect
} else {
- usage, err = s.handleNonStreamingResponse(ctx, resp, c, account, originalModel, reqModel, toolNameMap)
+ usage, err = s.handleNonStreamingResponse(ctx, resp, c, account, originalModel, reqModel, toolNameMap, shouldMimicClaudeCode)
if err != nil {
return nil, err
}
@@ -2124,7 +2138,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
}, nil
}
-func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Context, account *Account, body []byte, token, tokenType, modelID string, reqStream bool) (*http.Request, error) {
+func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Context, account *Account, body []byte, token, tokenType, modelID string, reqStream bool, mimicClaudeCode bool) (*http.Request, error) {
// 确定目标URL
targetURL := claudeAPIURL
if account.Type == AccountTypeAPIKey {
@@ -2140,7 +2154,7 @@ func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Contex
// OAuth账号:应用统一指纹
var fingerprint *Fingerprint
- if account.IsOAuth() && s.identityService != nil {
+ if account.IsOAuth() && mimicClaudeCode && s.identityService != nil {
// 1. 获取或创建指纹(包含随机生成的ClientID)
fp, err := s.identityService.GetOrCreateFingerprint(ctx, account.ID, c.Request.Header)
if err != nil {
@@ -2193,12 +2207,12 @@ func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Contex
if req.Header.Get("anthropic-version") == "" {
req.Header.Set("anthropic-version", "2023-06-01")
}
- if tokenType == "oauth" {
+ if tokenType == "oauth" && mimicClaudeCode {
applyClaudeOAuthHeaderDefaults(req, reqStream)
}
// 处理anthropic-beta header(OAuth账号需要特殊处理)
- if tokenType == "oauth" {
+ if tokenType == "oauth" && mimicClaudeCode {
req.Header.Set("anthropic-beta", s.getBetaHeader(modelID, c.GetHeader("anthropic-beta")))
} else if s.cfg != nil && s.cfg.Gateway.InjectBetaForAPIKey && req.Header.Get("anthropic-beta") == "" {
// API-key:仅在请求显式使用 beta 特性且客户端未提供时,按需补齐(默认关闭)
@@ -2578,7 +2592,7 @@ type streamingResult struct {
clientDisconnect bool // 客户端是否在流式传输过程中断开
}
-func (s *GatewayService) handleStreamingResponse(ctx context.Context, resp *http.Response, c *gin.Context, account *Account, startTime time.Time, originalModel, mappedModel string, toolNameMap map[string]string) (*streamingResult, error) {
+func (s *GatewayService) handleStreamingResponse(ctx context.Context, resp *http.Response, c *gin.Context, account *Account, startTime time.Time, originalModel, mappedModel string, toolNameMap map[string]string, mimicClaudeCode bool) (*streamingResult, error) {
// 更新5h窗口状态
s.rateLimitService.UpdateSessionWindow(ctx, account, resp.Header)
@@ -2671,7 +2685,7 @@ func (s *GatewayService) handleStreamingResponse(ctx context.Context, resp *http
}
needModelReplace := originalModel != mappedModel
- rewriteTools := account.IsOAuth()
+ rewriteTools := mimicClaudeCode
clientDisconnected := false // 客户端断开标志,断开后继续读取上游以获取完整usage
for {
@@ -2835,6 +2849,37 @@ func rewriteToolNamesInValue(value any, toolNameMap map[string]string) bool {
}
}
+func replaceToolNamesInText(text string, toolNameMap map[string]string) string {
+ if text == "" {
+ return text
+ }
+ output := toolNameFieldRe.ReplaceAllStringFunc(text, func(match string) string {
+ submatches := toolNameFieldRe.FindStringSubmatch(match)
+ if len(submatches) < 2 {
+ return match
+ }
+ name := submatches[1]
+ mapped := normalizeToolNameForOpenCode(name, toolNameMap)
+ if mapped == name {
+ return match
+ }
+ return strings.Replace(match, name, mapped, 1)
+ })
+ output = modelFieldRe.ReplaceAllStringFunc(output, func(match string) string {
+ submatches := modelFieldRe.FindStringSubmatch(match)
+ if len(submatches) < 2 {
+ return match
+ }
+ model := submatches[1]
+ mapped := claude.DenormalizeModelID(model)
+ if mapped == model {
+ return match
+ }
+ return strings.Replace(match, model, mapped, 1)
+ })
+ return output
+}
+
func (s *GatewayService) replaceToolNamesInSSELine(line string, toolNameMap map[string]string) string {
if !sseDataRe.MatchString(line) {
return line
@@ -2846,7 +2891,11 @@ func (s *GatewayService) replaceToolNamesInSSELine(line string, toolNameMap map[
var event map[string]any
if err := json.Unmarshal([]byte(data), &event); err != nil {
- return line
+ replaced := replaceToolNamesInText(data, toolNameMap)
+ if replaced == data {
+ return line
+ }
+ return "data: " + replaced
}
if !rewriteToolNamesInValue(event, toolNameMap) {
return line
@@ -2899,7 +2948,7 @@ func (s *GatewayService) parseSSEUsage(data string, usage *ClaudeUsage) {
}
}
-func (s *GatewayService) handleNonStreamingResponse(ctx context.Context, resp *http.Response, c *gin.Context, account *Account, originalModel, mappedModel string, toolNameMap map[string]string) (*ClaudeUsage, error) {
+func (s *GatewayService) handleNonStreamingResponse(ctx context.Context, resp *http.Response, c *gin.Context, account *Account, originalModel, mappedModel string, toolNameMap map[string]string, mimicClaudeCode bool) (*ClaudeUsage, error) {
// 更新5h窗口状态
s.rateLimitService.UpdateSessionWindow(ctx, account, resp.Header)
@@ -2920,7 +2969,7 @@ func (s *GatewayService) handleNonStreamingResponse(ctx context.Context, resp *h
if originalModel != mappedModel {
body = s.replaceModelInResponseBody(body, mappedModel, originalModel)
}
- if account.IsOAuth() {
+ if mimicClaudeCode {
body = s.replaceToolNamesInResponseBody(body, toolNameMap)
}
@@ -2966,7 +3015,11 @@ func (s *GatewayService) replaceToolNamesInResponseBody(body []byte, toolNameMap
}
var resp map[string]any
if err := json.Unmarshal(body, &resp); err != nil {
- return body
+ replaced := replaceToolNamesInText(string(body), toolNameMap)
+ if replaced == string(body) {
+ return body
+ }
+ return []byte(replaced)
}
if !rewriteToolNamesInValue(resp, toolNameMap) {
return body
@@ -3141,7 +3194,10 @@ func (s *GatewayService) ForwardCountTokens(ctx context.Context, c *gin.Context,
body := parsed.Body
reqModel := parsed.Model
- if account.IsOAuth() {
+ isClaudeCode := isClaudeCodeRequest(ctx, c, parsed)
+ shouldMimicClaudeCode := account.IsOAuth() && !isClaudeCode
+
+ if shouldMimicClaudeCode {
normalizeOpts := claudeOAuthNormalizeOptions{stripSystemCacheControl: true}
body, reqModel, _ = normalizeClaudeOAuthRequestBody(body, reqModel, normalizeOpts)
}
@@ -3172,7 +3228,7 @@ func (s *GatewayService) ForwardCountTokens(ctx context.Context, c *gin.Context,
}
// 构建上游请求
- upstreamReq, err := s.buildCountTokensRequest(ctx, c, account, body, token, tokenType, reqModel)
+ upstreamReq, err := s.buildCountTokensRequest(ctx, c, account, body, token, tokenType, reqModel, shouldMimicClaudeCode)
if err != nil {
s.countTokensError(c, http.StatusInternalServerError, "api_error", "Failed to build request")
return err
@@ -3205,7 +3261,7 @@ func (s *GatewayService) ForwardCountTokens(ctx context.Context, c *gin.Context,
log.Printf("Account %d: detected thinking block signature error on count_tokens, retrying with filtered thinking blocks", account.ID)
filteredBody := FilterThinkingBlocksForRetry(body)
- retryReq, buildErr := s.buildCountTokensRequest(ctx, c, account, filteredBody, token, tokenType, reqModel)
+ retryReq, buildErr := s.buildCountTokensRequest(ctx, c, account, filteredBody, token, tokenType, reqModel, shouldMimicClaudeCode)
if buildErr == nil {
retryResp, retryErr := s.httpUpstream.Do(retryReq, proxyURL, account.ID, account.Concurrency)
if retryErr == nil {
@@ -3270,7 +3326,7 @@ func (s *GatewayService) ForwardCountTokens(ctx context.Context, c *gin.Context,
}
// buildCountTokensRequest 构建 count_tokens 上游请求
-func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Context, account *Account, body []byte, token, tokenType, modelID string) (*http.Request, error) {
+func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Context, account *Account, body []byte, token, tokenType, modelID string, mimicClaudeCode bool) (*http.Request, error) {
// 确定目标 URL
targetURL := claudeAPICountTokensURL
if account.Type == AccountTypeAPIKey {
@@ -3285,7 +3341,7 @@ func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Con
}
// OAuth 账号:应用统一指纹和重写 userID
- if account.IsOAuth() && s.identityService != nil {
+ if account.IsOAuth() && mimicClaudeCode && s.identityService != nil {
fp, err := s.identityService.GetOrCreateFingerprint(ctx, account.ID, c.Request.Header)
if err == nil {
accountUUID := account.GetExtraString("account_uuid")
@@ -3320,7 +3376,7 @@ func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Con
}
// OAuth 账号:应用指纹到请求头
- if account.IsOAuth() && s.identityService != nil {
+ if account.IsOAuth() && mimicClaudeCode && s.identityService != nil {
fp, _ := s.identityService.GetOrCreateFingerprint(ctx, account.ID, c.Request.Header)
if fp != nil {
s.identityService.ApplyFingerprint(req, fp)
@@ -3334,13 +3390,13 @@ func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Con
if req.Header.Get("anthropic-version") == "" {
req.Header.Set("anthropic-version", "2023-06-01")
}
- if tokenType == "oauth" {
+ if tokenType == "oauth" && mimicClaudeCode {
applyClaudeOAuthHeaderDefaults(req, false)
}
// OAuth 账号:处理 anthropic-beta header
- if tokenType == "oauth" {
- req.Header.Set("anthropic-beta", s.getBetaHeader(modelID, c.GetHeader("anthropic-beta")))
+ if tokenType == "oauth" && mimicClaudeCode {
+ req.Header.Set("anthropic-beta", claude.CountTokensBetaHeader)
} else if s.cfg != nil && s.cfg.Gateway.InjectBetaForAPIKey && req.Header.Get("anthropic-beta") == "" {
// API-key:与 messages 同步的按需 beta 注入(默认关闭)
if requestNeedsBetaFeatures(body) {
From 98b65e67f21189f441f92dec88ed40b3ba7e8561 Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Thu, 15 Jan 2026 21:42:13 +0800
Subject: [PATCH 03/99] fix(gateway): avoid injecting invalid SSE on client
cancel
---
.../service/openai_gateway_service.go | 6 +++
.../service/openai_gateway_service_test.go | 37 +++++++++++++++++++
2 files changed, 43 insertions(+)
diff --git a/backend/internal/service/openai_gateway_service.go b/backend/internal/service/openai_gateway_service.go
index 04a90fdd..d49be282 100644
--- a/backend/internal/service/openai_gateway_service.go
+++ b/backend/internal/service/openai_gateway_service.go
@@ -1064,6 +1064,12 @@ func (s *OpenAIGatewayService) handleStreamingResponse(ctx context.Context, resp
return &openaiStreamingResult{usage: usage, firstTokenMs: firstTokenMs}, nil
}
if ev.err != nil {
+ // 客户端断开/取消请求时,上游读取往往会返回 context canceled。
+ // /v1/responses 的 SSE 事件必须符合 OpenAI 协议;这里不注入自定义 error event,避免下游 SDK 解析失败。
+ if errors.Is(ev.err, context.Canceled) || errors.Is(ev.err, context.DeadlineExceeded) {
+ log.Printf("Context canceled during streaming, returning collected usage")
+ return &openaiStreamingResult{usage: usage, firstTokenMs: firstTokenMs}, nil
+ }
if errors.Is(ev.err, bufio.ErrTooLong) {
log.Printf("SSE line too long: account=%d max_size=%d error=%v", account.ID, maxLineSize, ev.err)
sendErrorEvent("response_too_large")
diff --git a/backend/internal/service/openai_gateway_service_test.go b/backend/internal/service/openai_gateway_service_test.go
index 42b88b7d..ead6e143 100644
--- a/backend/internal/service/openai_gateway_service_test.go
+++ b/backend/internal/service/openai_gateway_service_test.go
@@ -33,6 +33,11 @@ type stubConcurrencyCache struct {
ConcurrencyCache
}
+type cancelReadCloser struct{}
+
+func (c cancelReadCloser) Read(p []byte) (int, error) { return 0, context.Canceled }
+func (c cancelReadCloser) Close() error { return nil }
+
func (c stubConcurrencyCache) AcquireAccountSlot(ctx context.Context, accountID int64, maxConcurrency int, requestID string) (bool, error) {
return true, nil
}
@@ -174,6 +179,38 @@ func TestOpenAIStreamingTimeout(t *testing.T) {
}
}
+func TestOpenAIStreamingContextCanceledDoesNotInjectErrorEvent(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ cfg := &config.Config{
+ Gateway: config.GatewayConfig{
+ StreamDataIntervalTimeout: 0,
+ StreamKeepaliveInterval: 0,
+ MaxLineSize: defaultMaxLineSize,
+ },
+ }
+ svc := &OpenAIGatewayService{cfg: cfg}
+
+ rec := httptest.NewRecorder()
+ c, _ := gin.CreateTestContext(rec)
+ ctx, cancel := context.WithCancel(context.Background())
+ cancel()
+ c.Request = httptest.NewRequest(http.MethodPost, "/", nil).WithContext(ctx)
+
+ resp := &http.Response{
+ StatusCode: http.StatusOK,
+ Body: cancelReadCloser{},
+ Header: http.Header{},
+ }
+
+ _, err := svc.handleStreamingResponse(c.Request.Context(), resp, c, &Account{ID: 1}, time.Now(), "model", "model")
+ if err != nil {
+ t.Fatalf("expected nil error, got %v", err)
+ }
+ if strings.Contains(rec.Body.String(), "event: error") || strings.Contains(rec.Body.String(), "stream_read_error") {
+ t.Fatalf("expected no injected SSE error event, got %q", rec.Body.String())
+ }
+}
+
func TestOpenAIStreamingTooLong(t *testing.T) {
gin.SetMode(gin.TestMode)
cfg := &config.Config{
From c11f14f3a030c30846183704ccd6193785899bd4 Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Thu, 15 Jan 2026 21:51:14 +0800
Subject: [PATCH 04/99] fix(gateway): drain upstream after client disconnect
---
.../service/openai_gateway_service.go | 43 ++++++++++----
.../service/openai_gateway_service_test.go | 59 +++++++++++++++++++
2 files changed, 91 insertions(+), 11 deletions(-)
diff --git a/backend/internal/service/openai_gateway_service.go b/backend/internal/service/openai_gateway_service.go
index d49be282..fb811e9e 100644
--- a/backend/internal/service/openai_gateway_service.go
+++ b/backend/internal/service/openai_gateway_service.go
@@ -1046,8 +1046,9 @@ func (s *OpenAIGatewayService) handleStreamingResponse(ctx context.Context, resp
// 仅发送一次错误事件,避免多次写入导致协议混乱(写失败时尽力通知客户端)
errorEventSent := false
+ clientDisconnected := false // 客户端断开后继续 drain 上游以收集 usage
sendErrorEvent := func(reason string) {
- if errorEventSent {
+ if errorEventSent || clientDisconnected {
return
}
errorEventSent = true
@@ -1070,6 +1071,11 @@ func (s *OpenAIGatewayService) handleStreamingResponse(ctx context.Context, resp
log.Printf("Context canceled during streaming, returning collected usage")
return &openaiStreamingResult{usage: usage, firstTokenMs: firstTokenMs}, nil
}
+ // 客户端已断开时,上游出错仅影响体验,不影响计费;返回已收集 usage
+ if clientDisconnected {
+ log.Printf("Upstream read error after client disconnect: %v, returning collected usage", ev.err)
+ return &openaiStreamingResult{usage: usage, firstTokenMs: firstTokenMs}, nil
+ }
if errors.Is(ev.err, bufio.ErrTooLong) {
log.Printf("SSE line too long: account=%d max_size=%d error=%v", account.ID, maxLineSize, ev.err)
sendErrorEvent("response_too_large")
@@ -1091,12 +1097,15 @@ func (s *OpenAIGatewayService) handleStreamingResponse(ctx context.Context, resp
line = s.replaceModelInSSELine(line, mappedModel, originalModel)
}
- // Forward line
- if _, err := fmt.Fprintf(w, "%s\n", line); err != nil {
- sendErrorEvent("write_failed")
- return &openaiStreamingResult{usage: usage, firstTokenMs: firstTokenMs}, err
+ // 写入客户端(客户端断开后继续 drain 上游)
+ if !clientDisconnected {
+ if _, err := fmt.Fprintf(w, "%s\n", line); err != nil {
+ clientDisconnected = true
+ log.Printf("Client disconnected during streaming, continuing to drain upstream for billing")
+ } else {
+ flusher.Flush()
+ }
}
- flusher.Flush()
// Record first token time
if firstTokenMs == nil && data != "" && data != "[DONE]" {
@@ -1106,11 +1115,14 @@ func (s *OpenAIGatewayService) handleStreamingResponse(ctx context.Context, resp
s.parseSSEUsage(data, usage)
} else {
// Forward non-data lines as-is
- if _, err := fmt.Fprintf(w, "%s\n", line); err != nil {
- sendErrorEvent("write_failed")
- return &openaiStreamingResult{usage: usage, firstTokenMs: firstTokenMs}, err
+ if !clientDisconnected {
+ if _, err := fmt.Fprintf(w, "%s\n", line); err != nil {
+ clientDisconnected = true
+ log.Printf("Client disconnected during streaming, continuing to drain upstream for billing")
+ } else {
+ flusher.Flush()
+ }
}
- flusher.Flush()
}
case <-intervalCh:
@@ -1118,6 +1130,10 @@ func (s *OpenAIGatewayService) handleStreamingResponse(ctx context.Context, resp
if time.Since(lastRead) < streamInterval {
continue
}
+ if clientDisconnected {
+ log.Printf("Upstream timeout after client disconnect, returning collected usage")
+ return &openaiStreamingResult{usage: usage, firstTokenMs: firstTokenMs}, nil
+ }
log.Printf("Stream data interval timeout: account=%d model=%s interval=%s", account.ID, originalModel, streamInterval)
// 处理流超时,可能标记账户为临时不可调度或错误状态
if s.rateLimitService != nil {
@@ -1127,11 +1143,16 @@ func (s *OpenAIGatewayService) handleStreamingResponse(ctx context.Context, resp
return &openaiStreamingResult{usage: usage, firstTokenMs: firstTokenMs}, fmt.Errorf("stream data interval timeout")
case <-keepaliveCh:
+ if clientDisconnected {
+ continue
+ }
if time.Since(lastDataAt) < keepaliveInterval {
continue
}
if _, err := fmt.Fprint(w, ":\n\n"); err != nil {
- return &openaiStreamingResult{usage: usage, firstTokenMs: firstTokenMs}, err
+ clientDisconnected = true
+ log.Printf("Client disconnected during streaming, continuing to drain upstream for billing")
+ continue
}
flusher.Flush()
}
diff --git a/backend/internal/service/openai_gateway_service_test.go b/backend/internal/service/openai_gateway_service_test.go
index ead6e143..3ec37544 100644
--- a/backend/internal/service/openai_gateway_service_test.go
+++ b/backend/internal/service/openai_gateway_service_test.go
@@ -38,6 +38,20 @@ type cancelReadCloser struct{}
func (c cancelReadCloser) Read(p []byte) (int, error) { return 0, context.Canceled }
func (c cancelReadCloser) Close() error { return nil }
+type failingGinWriter struct {
+ gin.ResponseWriter
+ failAfter int
+ writes int
+}
+
+func (w *failingGinWriter) Write(p []byte) (int, error) {
+ if w.writes >= w.failAfter {
+ return 0, errors.New("write failed")
+ }
+ w.writes++
+ return w.ResponseWriter.Write(p)
+}
+
func (c stubConcurrencyCache) AcquireAccountSlot(ctx context.Context, accountID int64, maxConcurrency int, requestID string) (bool, error) {
return true, nil
}
@@ -211,6 +225,51 @@ func TestOpenAIStreamingContextCanceledDoesNotInjectErrorEvent(t *testing.T) {
}
}
+func TestOpenAIStreamingClientDisconnectDrainsUpstreamUsage(t *testing.T) {
+ gin.SetMode(gin.TestMode)
+ cfg := &config.Config{
+ Gateway: config.GatewayConfig{
+ StreamDataIntervalTimeout: 0,
+ StreamKeepaliveInterval: 0,
+ MaxLineSize: defaultMaxLineSize,
+ },
+ }
+ svc := &OpenAIGatewayService{cfg: cfg}
+
+ rec := httptest.NewRecorder()
+ c, _ := gin.CreateTestContext(rec)
+ c.Request = httptest.NewRequest(http.MethodPost, "/", nil)
+ c.Writer = &failingGinWriter{ResponseWriter: c.Writer, failAfter: 0}
+
+ pr, pw := io.Pipe()
+ resp := &http.Response{
+ StatusCode: http.StatusOK,
+ Body: pr,
+ Header: http.Header{},
+ }
+
+ go func() {
+ defer func() { _ = pw.Close() }()
+ _, _ = pw.Write([]byte("data: {\"type\":\"response.in_progress\",\"response\":{}}\n\n"))
+ _, _ = pw.Write([]byte("data: {\"type\":\"response.completed\",\"response\":{\"usage\":{\"input_tokens\":3,\"output_tokens\":5,\"input_tokens_details\":{\"cached_tokens\":1}}}}\n\n"))
+ }()
+
+ result, err := svc.handleStreamingResponse(c.Request.Context(), resp, c, &Account{ID: 1}, time.Now(), "model", "model")
+ _ = pr.Close()
+ if err != nil {
+ t.Fatalf("expected nil error, got %v", err)
+ }
+ if result == nil || result.usage == nil {
+ t.Fatalf("expected usage result")
+ }
+ if result.usage.InputTokens != 3 || result.usage.OutputTokens != 5 || result.usage.CacheReadInputTokens != 1 {
+ t.Fatalf("unexpected usage: %+v", *result.usage)
+ }
+ if strings.Contains(rec.Body.String(), "event: error") || strings.Contains(rec.Body.String(), "write_failed") {
+ t.Fatalf("expected no injected SSE error event, got %q", rec.Body.String())
+ }
+}
+
func TestOpenAIStreamingTooLong(t *testing.T) {
gin.SetMode(gin.TestMode)
cfg := &config.Config{
From 65fd0d15ae0f5b1b454d27a02e7df3e8b5670b2d Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Fri, 16 Jan 2026 00:41:29 +0800
Subject: [PATCH 05/99] =?UTF-8?q?fix(=E7=BD=91=E5=85=B3):=20=E8=A1=A5?=
=?UTF-8?q?=E9=BD=90=E9=9D=9E=20Claude=20Code=20OAuth=20=E5=85=BC=E5=AE=B9?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
backend/internal/pkg/claude/constants.go | 6 +
backend/internal/service/account.go | 16 ++
backend/internal/service/gateway_service.go | 239 +++++++++++++++++---
3 files changed, 232 insertions(+), 29 deletions(-)
diff --git a/backend/internal/pkg/claude/constants.go b/backend/internal/pkg/claude/constants.go
index f60412c2..0c6e9b4c 100644
--- a/backend/internal/pkg/claude/constants.go
+++ b/backend/internal/pkg/claude/constants.go
@@ -15,6 +15,12 @@ const (
// DefaultBetaHeader Claude Code 客户端默认的 anthropic-beta header
const DefaultBetaHeader = BetaClaudeCode + "," + BetaOAuth + "," + BetaInterleavedThinking + "," + BetaFineGrainedToolStreaming
+// MessageBetaHeaderNoTools /v1/messages 在无工具时的 beta header
+const MessageBetaHeaderNoTools = BetaOAuth + "," + BetaInterleavedThinking
+
+// MessageBetaHeaderWithTools /v1/messages 在有工具时的 beta header
+const MessageBetaHeaderWithTools = BetaClaudeCode + "," + BetaOAuth + "," + BetaInterleavedThinking
+
// CountTokensBetaHeader count_tokens 请求使用的 anthropic-beta header
const CountTokensBetaHeader = BetaClaudeCode + "," + BetaOAuth + "," + BetaInterleavedThinking + "," + BetaTokenCounting
diff --git a/backend/internal/service/account.go b/backend/internal/service/account.go
index cfce9bfa..435eecd9 100644
--- a/backend/internal/service/account.go
+++ b/backend/internal/service/account.go
@@ -364,6 +364,22 @@ func (a *Account) GetExtraString(key string) string {
return ""
}
+func (a *Account) GetClaudeUserID() string {
+ if v := strings.TrimSpace(a.GetExtraString("claude_user_id")); v != "" {
+ return v
+ }
+ if v := strings.TrimSpace(a.GetExtraString("anthropic_user_id")); v != "" {
+ return v
+ }
+ if v := strings.TrimSpace(a.GetCredential("claude_user_id")); v != "" {
+ return v
+ }
+ if v := strings.TrimSpace(a.GetCredential("anthropic_user_id")); v != "" {
+ return v
+ }
+ return ""
+}
+
func (a *Account) IsCustomErrorCodesEnabled() bool {
if a.Type != AccountTypeAPIKey || a.Credentials == nil {
return false
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index 904b5acd..790d9fa2 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -51,6 +51,9 @@ var (
toolNameCamelRe = regexp.MustCompile(`([a-z0-9])([A-Z])`)
toolNameFieldRe = regexp.MustCompile(`"name"\s*:\s*"([^"]+)"`)
modelFieldRe = regexp.MustCompile(`"model"\s*:\s*"([^"]+)"`)
+ toolDescAbsPathRe = regexp.MustCompile(`/\/?(?:home|Users|tmp|var|opt|usr|etc)\/[^\s,\)"'\]]+`)
+ toolDescWinPathRe = regexp.MustCompile(`(?i)[A-Z]:\\[^\s,\)"'\]]+`)
+ opencodeTextRe = regexp.MustCompile(`(?i)opencode`)
claudeToolNameOverrides = map[string]string{
"bash": "Bash",
@@ -451,6 +454,22 @@ func normalizeToolNameForClaude(name string, cache map[string]string) string {
}
func normalizeToolNameForOpenCode(name string, cache map[string]string) string {
+ if name == "" {
+ return name
+ }
+ stripped := stripToolPrefix(name)
+ if cache != nil {
+ if mapped, ok := cache[stripped]; ok {
+ return mapped
+ }
+ }
+ if mapped, ok := openCodeToolOverrides[stripped]; ok {
+ return mapped
+ }
+ return toSnakeCase(stripped)
+}
+
+func normalizeParamNameForOpenCode(name string, cache map[string]string) string {
if name == "" {
return name
}
@@ -459,10 +478,63 @@ func normalizeToolNameForOpenCode(name string, cache map[string]string) string {
return mapped
}
}
- if mapped, ok := openCodeToolOverrides[name]; ok {
- return mapped
+ return name
+}
+
+func sanitizeOpenCodeText(text string) string {
+ if text == "" {
+ return text
+ }
+ text = strings.ReplaceAll(text, "OpenCode", "Claude Code")
+ text = opencodeTextRe.ReplaceAllString(text, "Claude")
+ return text
+}
+
+func sanitizeToolDescription(description string) string {
+ if description == "" {
+ return description
+ }
+ description = toolDescAbsPathRe.ReplaceAllString(description, "[path]")
+ description = toolDescWinPathRe.ReplaceAllString(description, "[path]")
+ return sanitizeOpenCodeText(description)
+}
+
+func normalizeToolInputSchema(inputSchema any, cache map[string]string) {
+ schema, ok := inputSchema.(map[string]any)
+ if !ok {
+ return
+ }
+ properties, ok := schema["properties"].(map[string]any)
+ if !ok {
+ return
+ }
+
+ newProperties := make(map[string]any, len(properties))
+ for key, value := range properties {
+ snakeKey := toSnakeCase(key)
+ newProperties[snakeKey] = value
+ if snakeKey != key && cache != nil {
+ cache[snakeKey] = key
+ }
+ }
+ schema["properties"] = newProperties
+
+ if required, ok := schema["required"].([]any); ok {
+ newRequired := make([]any, 0, len(required))
+ for _, item := range required {
+ name, ok := item.(string)
+ if !ok {
+ newRequired = append(newRequired, item)
+ continue
+ }
+ snakeName := toSnakeCase(name)
+ newRequired = append(newRequired, snakeName)
+ if snakeName != name && cache != nil {
+ cache[snakeName] = name
+ }
+ }
+ schema["required"] = newRequired
}
- return toSnakeCase(name)
}
func stripCacheControlFromSystemBlocks(system any) bool {
@@ -479,9 +551,6 @@ func stripCacheControlFromSystemBlocks(system any) bool {
if _, exists := block["cache_control"]; !exists {
continue
}
- if text, ok := block["text"].(string); ok && text == claudeCodeSystemPrompt {
- continue
- }
delete(block, "cache_control")
changed = true
}
@@ -499,6 +568,34 @@ func normalizeClaudeOAuthRequestBody(body []byte, modelID string, opts claudeOAu
toolNameMap := make(map[string]string)
+ if system, ok := req["system"]; ok {
+ switch v := system.(type) {
+ case string:
+ sanitized := sanitizeOpenCodeText(v)
+ if sanitized != v {
+ req["system"] = sanitized
+ }
+ case []any:
+ for _, item := range v {
+ block, ok := item.(map[string]any)
+ if !ok {
+ continue
+ }
+ if blockType, _ := block["type"].(string); blockType != "text" {
+ continue
+ }
+ text, ok := block["text"].(string)
+ if !ok || text == "" {
+ continue
+ }
+ sanitized := sanitizeOpenCodeText(text)
+ if sanitized != text {
+ block["text"] = sanitized
+ }
+ }
+ }
+ }
+
if rawModel, ok := req["model"].(string); ok {
normalized := claude.NormalizeModelID(rawModel)
if normalized != rawModel {
@@ -521,6 +618,15 @@ func normalizeClaudeOAuthRequestBody(body []byte, modelID string, opts claudeOAu
toolMap["name"] = normalized
}
}
+ if desc, ok := toolMap["description"].(string); ok {
+ sanitized := sanitizeToolDescription(desc)
+ if sanitized != desc {
+ toolMap["description"] = sanitized
+ }
+ }
+ if schema, ok := toolMap["input_schema"]; ok {
+ normalizeToolInputSchema(schema, toolNameMap)
+ }
tools[idx] = toolMap
}
req["tools"] = tools
@@ -532,13 +638,15 @@ func normalizeClaudeOAuthRequestBody(body []byte, modelID string, opts claudeOAu
normalized = name
}
if toolMap, ok := value.(map[string]any); ok {
- if toolName, ok := toolMap["name"].(string); ok {
- mappedName := normalizeToolNameForClaude(toolName, toolNameMap)
- if mappedName != "" && mappedName != toolName {
- toolMap["name"] = mappedName
+ toolMap["name"] = normalized
+ if desc, ok := toolMap["description"].(string); ok {
+ sanitized := sanitizeToolDescription(desc)
+ if sanitized != desc {
+ toolMap["description"] = sanitized
}
- } else if normalized != name {
- toolMap["name"] = normalized
+ }
+ if schema, ok := toolMap["input_schema"]; ok {
+ normalizeToolInputSchema(schema, toolNameMap)
}
normalizedTools[normalized] = toolMap
continue
@@ -611,7 +719,7 @@ func normalizeClaudeOAuthRequestBody(body []byte, modelID string, opts claudeOAu
}
func (s *GatewayService) buildOAuthMetadataUserID(parsed *ParsedRequest, account *Account, fp *Fingerprint) string {
- if parsed == nil || fp == nil || fp.ClientID == "" {
+ if parsed == nil || account == nil {
return ""
}
if parsed.MetadataUserID != "" {
@@ -621,13 +729,22 @@ func (s *GatewayService) buildOAuthMetadataUserID(parsed *ParsedRequest, account
if accountUUID == "" {
return ""
}
+
+ userID := strings.TrimSpace(account.GetClaudeUserID())
+ if userID == "" && fp != nil {
+ userID = fp.ClientID
+ }
+ if userID == "" {
+ return ""
+ }
+
sessionHash := s.GenerateSessionHash(parsed)
sessionID := uuid.NewString()
if sessionHash != "" {
seed := fmt.Sprintf("%d::%s", account.ID, sessionHash)
sessionID = generateSessionUUID(seed)
}
- return fmt.Sprintf("user_%s_account_%s_session_%s", fp.ClientID, accountUUID, sessionID)
+ return fmt.Sprintf("user_%s_account_%s_session_%s", userID, accountUUID, sessionID)
}
func generateSessionUUID(seed string) string {
@@ -2213,7 +2330,11 @@ func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Contex
// 处理anthropic-beta header(OAuth账号需要特殊处理)
if tokenType == "oauth" && mimicClaudeCode {
- req.Header.Set("anthropic-beta", s.getBetaHeader(modelID, c.GetHeader("anthropic-beta")))
+ if requestHasTools(body) {
+ req.Header.Set("anthropic-beta", claude.MessageBetaHeaderWithTools)
+ } else {
+ req.Header.Set("anthropic-beta", claude.MessageBetaHeaderNoTools)
+ }
} else if s.cfg != nil && s.cfg.Gateway.InjectBetaForAPIKey && req.Header.Get("anthropic-beta") == "" {
// API-key:仅在请求显式使用 beta 特性且客户端未提供时,按需补齐(默认关闭)
if requestNeedsBetaFeatures(body) {
@@ -2284,6 +2405,20 @@ func requestNeedsBetaFeatures(body []byte) bool {
return false
}
+func requestHasTools(body []byte) bool {
+ tools := gjson.GetBytes(body, "tools")
+ if !tools.Exists() {
+ return false
+ }
+ if tools.IsArray() {
+ return len(tools.Array()) > 0
+ }
+ if tools.IsObject() {
+ return len(tools.Map()) > 0
+ }
+ return false
+}
+
func defaultAPIKeyBetaHeader(body []byte) string {
modelID := gjson.GetBytes(body, "model").String()
if strings.Contains(strings.ToLower(modelID), "haiku") {
@@ -2817,6 +2952,45 @@ func (s *GatewayService) replaceModelInSSELine(line, fromModel, toModel string)
return "data: " + string(newData)
}
+func rewriteParamKeysInValue(value any, cache map[string]string) (any, bool) {
+ switch v := value.(type) {
+ case map[string]any:
+ changed := false
+ rewritten := make(map[string]any, len(v))
+ for key, item := range v {
+ newKey := normalizeParamNameForOpenCode(key, cache)
+ newItem, childChanged := rewriteParamKeysInValue(item, cache)
+ if childChanged {
+ changed = true
+ }
+ if newKey != key {
+ changed = true
+ }
+ rewritten[newKey] = newItem
+ }
+ if !changed {
+ return value, false
+ }
+ return rewritten, true
+ case []any:
+ changed := false
+ rewritten := make([]any, len(v))
+ for idx, item := range v {
+ newItem, childChanged := rewriteParamKeysInValue(item, cache)
+ if childChanged {
+ changed = true
+ }
+ rewritten[idx] = newItem
+ }
+ if !changed {
+ return value, false
+ }
+ return rewritten, true
+ default:
+ return value, false
+ }
+}
+
func rewriteToolNamesInValue(value any, toolNameMap map[string]string) bool {
switch v := value.(type) {
case map[string]any:
@@ -2829,6 +3003,15 @@ func rewriteToolNamesInValue(value any, toolNameMap map[string]string) bool {
changed = true
}
}
+ if input, ok := v["input"].(map[string]any); ok {
+ rewrittenInput, inputChanged := rewriteParamKeysInValue(input, toolNameMap)
+ if inputChanged {
+ if m, ok := rewrittenInput.(map[string]any); ok {
+ v["input"] = m
+ changed = true
+ }
+ }
+ }
}
for _, item := range v {
if rewriteToolNamesInValue(item, toolNameMap) {
@@ -2877,6 +3060,15 @@ func replaceToolNamesInText(text string, toolNameMap map[string]string) string {
}
return strings.Replace(match, model, mapped, 1)
})
+
+ for mapped, original := range toolNameMap {
+ if mapped == "" || original == "" || mapped == original {
+ continue
+ }
+ output = strings.ReplaceAll(output, "\""+mapped+"\":", "\""+original+"\":")
+ output = strings.ReplaceAll(output, "\\\""+mapped+"\\\":", "\\\""+original+"\\\":")
+ }
+
return output
}
@@ -2889,22 +3081,11 @@ func (s *GatewayService) replaceToolNamesInSSELine(line string, toolNameMap map[
return line
}
- var event map[string]any
- if err := json.Unmarshal([]byte(data), &event); err != nil {
- replaced := replaceToolNamesInText(data, toolNameMap)
- if replaced == data {
- return line
- }
- return "data: " + replaced
- }
- if !rewriteToolNamesInValue(event, toolNameMap) {
+ replaced := replaceToolNamesInText(data, toolNameMap)
+ if replaced == data {
return line
}
- newData, err := json.Marshal(event)
- if err != nil {
- return line
- }
- return "data: " + string(newData)
+ return "data: " + replaced
}
func (s *GatewayService) parseSSEUsage(data string, usage *ClaudeUsage) {
From bd854e1750e568c4a02b3a276e68bcd6336f5368 Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Fri, 16 Jan 2026 23:15:52 +0800
Subject: [PATCH 06/99] =?UTF-8?q?fix(=E7=BD=91=E5=85=B3):=20Claude=20Code?=
=?UTF-8?q?=20OAuth=20=E8=A1=A5=E9=BD=90=20oauth=20beta?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
backend/internal/service/gateway_service.go | 34 ++++++++++++++++-----
1 file changed, 27 insertions(+), 7 deletions(-)
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index 790d9fa2..aa811bf5 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -2328,12 +2328,19 @@ func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Contex
applyClaudeOAuthHeaderDefaults(req, reqStream)
}
- // 处理anthropic-beta header(OAuth账号需要特殊处理)
- if tokenType == "oauth" && mimicClaudeCode {
- if requestHasTools(body) {
- req.Header.Set("anthropic-beta", claude.MessageBetaHeaderWithTools)
+ // 处理 anthropic-beta header(OAuth 账号需要包含 oauth beta)
+ if tokenType == "oauth" {
+ if mimicClaudeCode {
+ // 非 Claude Code 客户端:按 Claude Code 规则生成 beta header
+ if requestHasTools(body) {
+ req.Header.Set("anthropic-beta", claude.MessageBetaHeaderWithTools)
+ } else {
+ req.Header.Set("anthropic-beta", claude.MessageBetaHeaderNoTools)
+ }
} else {
- req.Header.Set("anthropic-beta", claude.MessageBetaHeaderNoTools)
+ // Claude Code 客户端:尽量透传原始 header,仅补齐 oauth beta
+ clientBetaHeader := req.Header.Get("anthropic-beta")
+ req.Header.Set("anthropic-beta", s.getBetaHeader(modelID, clientBetaHeader))
}
} else if s.cfg != nil && s.cfg.Gateway.InjectBetaForAPIKey && req.Header.Get("anthropic-beta") == "" {
// API-key:仅在请求显式使用 beta 特性且客户端未提供时,按需补齐(默认关闭)
@@ -3576,8 +3583,21 @@ func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Con
}
// OAuth 账号:处理 anthropic-beta header
- if tokenType == "oauth" && mimicClaudeCode {
- req.Header.Set("anthropic-beta", claude.CountTokensBetaHeader)
+ if tokenType == "oauth" {
+ if mimicClaudeCode {
+ req.Header.Set("anthropic-beta", claude.CountTokensBetaHeader)
+ } else {
+ clientBetaHeader := req.Header.Get("anthropic-beta")
+ if clientBetaHeader == "" {
+ req.Header.Set("anthropic-beta", claude.CountTokensBetaHeader)
+ } else {
+ beta := s.getBetaHeader(modelID, clientBetaHeader)
+ if !strings.Contains(beta, claude.BetaTokenCounting) {
+ beta = beta + "," + claude.BetaTokenCounting
+ }
+ req.Header.Set("anthropic-beta", beta)
+ }
+ }
} else if s.cfg != nil && s.cfg.Gateway.InjectBetaForAPIKey && req.Header.Get("anthropic-beta") == "" {
// API-key:与 messages 同步的按需 beta 注入(默认关闭)
if requestNeedsBetaFeatures(body) {
From 2a7d04fec4f452bc20b73ab0fa04da9ef6fd7870 Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Thu, 15 Jan 2026 18:54:42 +0800
Subject: [PATCH 07/99] =?UTF-8?q?fix(=E7=BD=91=E5=85=B3):=20=E5=AF=B9?=
=?UTF-8?q?=E9=BD=90=20Claude=20OAuth=20=E8=AF=B7=E6=B1=82=E9=80=82?=
=?UTF-8?q?=E9=85=8D?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
backend/internal/pkg/claude/constants.go | 44 +-
backend/internal/service/gateway_service.go | 455 ++++++++++++++++++-
backend/internal/service/identity_service.go | 8 +-
3 files changed, 481 insertions(+), 26 deletions(-)
diff --git a/backend/internal/pkg/claude/constants.go b/backend/internal/pkg/claude/constants.go
index d1a56a84..15144881 100644
--- a/backend/internal/pkg/claude/constants.go
+++ b/backend/internal/pkg/claude/constants.go
@@ -25,15 +25,15 @@ const APIKeyHaikuBetaHeader = BetaInterleavedThinking
// DefaultHeaders 是 Claude Code 客户端默认请求头。
var DefaultHeaders = map[string]string{
- "User-Agent": "claude-cli/2.0.62 (external, cli)",
+ "User-Agent": "claude-cli/2.1.2 (external, cli)",
"X-Stainless-Lang": "js",
- "X-Stainless-Package-Version": "0.52.0",
+ "X-Stainless-Package-Version": "0.70.0",
"X-Stainless-OS": "Linux",
"X-Stainless-Arch": "x64",
"X-Stainless-Runtime": "node",
- "X-Stainless-Runtime-Version": "v22.14.0",
+ "X-Stainless-Runtime-Version": "v24.3.0",
"X-Stainless-Retry-Count": "0",
- "X-Stainless-Timeout": "60",
+ "X-Stainless-Timeout": "600",
"X-App": "cli",
"Anthropic-Dangerous-Direct-Browser-Access": "true",
}
@@ -79,3 +79,39 @@ func DefaultModelIDs() []string {
// DefaultTestModel 测试时使用的默认模型
const DefaultTestModel = "claude-sonnet-4-5-20250929"
+
+// ModelIDOverrides Claude OAuth 请求需要的模型 ID 映射
+var ModelIDOverrides = map[string]string{
+ "claude-sonnet-4-5": "claude-sonnet-4-5-20250929",
+ "claude-opus-4-5": "claude-opus-4-5-20251101",
+ "claude-haiku-4-5": "claude-haiku-4-5-20251001",
+}
+
+// ModelIDReverseOverrides 用于将上游模型 ID 还原为短名
+var ModelIDReverseOverrides = map[string]string{
+ "claude-sonnet-4-5-20250929": "claude-sonnet-4-5",
+ "claude-opus-4-5-20251101": "claude-opus-4-5",
+ "claude-haiku-4-5-20251001": "claude-haiku-4-5",
+}
+
+// NormalizeModelID 根据 Claude OAuth 规则映射模型
+func NormalizeModelID(id string) string {
+ if id == "" {
+ return id
+ }
+ if mapped, ok := ModelIDOverrides[id]; ok {
+ return mapped
+ }
+ return id
+}
+
+// DenormalizeModelID 将上游模型 ID 转换为短名
+func DenormalizeModelID(id string) string {
+ if id == "" {
+ return id
+ }
+ if mapped, ok := ModelIDReverseOverrides[id]; ok {
+ return mapped
+ }
+ return id
+}
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index 1e3221d3..899a0fc5 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -18,12 +18,14 @@ import (
"strings"
"sync/atomic"
"time"
+ "unicode"
"github.com/Wei-Shaw/sub2api/internal/config"
"github.com/Wei-Shaw/sub2api/internal/pkg/claude"
"github.com/Wei-Shaw/sub2api/internal/pkg/ctxkey"
"github.com/Wei-Shaw/sub2api/internal/util/responseheaders"
"github.com/Wei-Shaw/sub2api/internal/util/urlvalidator"
+ "github.com/google/uuid"
"github.com/tidwall/gjson"
"github.com/tidwall/sjson"
@@ -60,6 +62,36 @@ var (
sseDataRe = regexp.MustCompile(`^data:\s*`)
sessionIDRegex = regexp.MustCompile(`session_([a-f0-9-]{36})`)
claudeCliUserAgentRe = regexp.MustCompile(`^claude-cli/\d+\.\d+\.\d+`)
+ toolPrefixRe = regexp.MustCompile(`(?i)^(?:oc_|mcp_)`)
+ toolNameBoundaryRe = regexp.MustCompile(`[^a-zA-Z0-9]+`)
+ toolNameCamelRe = regexp.MustCompile(`([a-z0-9])([A-Z])`)
+
+ claudeToolNameOverrides = map[string]string{
+ "bash": "Bash",
+ "read": "Read",
+ "edit": "Edit",
+ "write": "Write",
+ "task": "Task",
+ "glob": "Glob",
+ "grep": "Grep",
+ "webfetch": "WebFetch",
+ "websearch": "WebSearch",
+ "todowrite": "TodoWrite",
+ "question": "AskUserQuestion",
+ }
+ openCodeToolOverrides = map[string]string{
+ "Bash": "bash",
+ "Read": "read",
+ "Edit": "edit",
+ "Write": "write",
+ "Task": "task",
+ "Glob": "glob",
+ "Grep": "grep",
+ "WebFetch": "webfetch",
+ "WebSearch": "websearch",
+ "TodoWrite": "todowrite",
+ "AskUserQuestion": "question",
+ }
// claudeCodePromptPrefixes 用于检测 Claude Code 系统提示词的前缀列表
// 支持多种变体:标准版、Agent SDK 版、Explore Agent 版、Compact 版等
@@ -365,6 +397,268 @@ func (s *GatewayService) replaceModelInBody(body []byte, newModel string) []byte
return newBody
}
+type claudeOAuthNormalizeOptions struct {
+ injectMetadata bool
+ metadataUserID string
+ stripSystemCacheControl bool
+}
+
+func stripToolPrefix(value string) string {
+ if value == "" {
+ return value
+ }
+ return toolPrefixRe.ReplaceAllString(value, "")
+}
+
+func toPascalCase(value string) string {
+ if value == "" {
+ return value
+ }
+ normalized := toolNameBoundaryRe.ReplaceAllString(value, " ")
+ tokens := make([]string, 0)
+ for _, token := range strings.Fields(normalized) {
+ expanded := toolNameCamelRe.ReplaceAllString(token, "$1 $2")
+ parts := strings.Fields(expanded)
+ if len(parts) > 0 {
+ tokens = append(tokens, parts...)
+ }
+ }
+ if len(tokens) == 0 {
+ return value
+ }
+ var builder strings.Builder
+ for _, token := range tokens {
+ lower := strings.ToLower(token)
+ if lower == "" {
+ continue
+ }
+ runes := []rune(lower)
+ runes[0] = unicode.ToUpper(runes[0])
+ builder.WriteString(string(runes))
+ }
+ return builder.String()
+}
+
+func toSnakeCase(value string) string {
+ if value == "" {
+ return value
+ }
+ output := toolNameCamelRe.ReplaceAllString(value, "$1_$2")
+ output = toolNameBoundaryRe.ReplaceAllString(output, "_")
+ output = strings.Trim(output, "_")
+ return strings.ToLower(output)
+}
+
+func normalizeToolNameForClaude(name string, cache map[string]string) string {
+ if name == "" {
+ return name
+ }
+ stripped := stripToolPrefix(name)
+ mapped, ok := claudeToolNameOverrides[strings.ToLower(stripped)]
+ if !ok {
+ mapped = toPascalCase(stripped)
+ }
+ if mapped != "" && cache != nil && mapped != stripped {
+ cache[mapped] = stripped
+ }
+ if mapped == "" {
+ return stripped
+ }
+ return mapped
+}
+
+func normalizeToolNameForOpenCode(name string, cache map[string]string) string {
+ if name == "" {
+ return name
+ }
+ if cache != nil {
+ if mapped, ok := cache[name]; ok {
+ return mapped
+ }
+ }
+ if mapped, ok := openCodeToolOverrides[name]; ok {
+ return mapped
+ }
+ return toSnakeCase(name)
+}
+
+func stripCacheControlFromSystemBlocks(system any) bool {
+ blocks, ok := system.([]any)
+ if !ok {
+ return false
+ }
+ changed := false
+ for _, item := range blocks {
+ block, ok := item.(map[string]any)
+ if !ok {
+ continue
+ }
+ if _, exists := block["cache_control"]; !exists {
+ continue
+ }
+ if text, ok := block["text"].(string); ok && text == claudeCodeSystemPrompt {
+ continue
+ }
+ delete(block, "cache_control")
+ changed = true
+ }
+ return changed
+}
+
+func normalizeClaudeOAuthRequestBody(body []byte, modelID string, opts claudeOAuthNormalizeOptions) ([]byte, string, map[string]string) {
+ if len(body) == 0 {
+ return body, modelID, nil
+ }
+ var req map[string]any
+ if err := json.Unmarshal(body, &req); err != nil {
+ return body, modelID, nil
+ }
+
+ toolNameMap := make(map[string]string)
+
+ if rawModel, ok := req["model"].(string); ok {
+ normalized := claude.NormalizeModelID(rawModel)
+ if normalized != rawModel {
+ req["model"] = normalized
+ modelID = normalized
+ }
+ }
+
+ if rawTools, exists := req["tools"]; exists {
+ switch tools := rawTools.(type) {
+ case []any:
+ for idx, tool := range tools {
+ toolMap, ok := tool.(map[string]any)
+ if !ok {
+ continue
+ }
+ if name, ok := toolMap["name"].(string); ok {
+ normalized := normalizeToolNameForClaude(name, toolNameMap)
+ if normalized != "" && normalized != name {
+ toolMap["name"] = normalized
+ }
+ }
+ tools[idx] = toolMap
+ }
+ req["tools"] = tools
+ case map[string]any:
+ normalizedTools := make(map[string]any, len(tools))
+ for name, value := range tools {
+ normalized := normalizeToolNameForClaude(name, toolNameMap)
+ if normalized == "" {
+ normalized = name
+ }
+ if toolMap, ok := value.(map[string]any); ok {
+ if toolName, ok := toolMap["name"].(string); ok {
+ mappedName := normalizeToolNameForClaude(toolName, toolNameMap)
+ if mappedName != "" && mappedName != toolName {
+ toolMap["name"] = mappedName
+ }
+ } else if normalized != name {
+ toolMap["name"] = normalized
+ }
+ normalizedTools[normalized] = toolMap
+ continue
+ }
+ normalizedTools[normalized] = value
+ }
+ req["tools"] = normalizedTools
+ }
+ } else {
+ req["tools"] = []any{}
+ }
+
+ if messages, ok := req["messages"].([]any); ok {
+ for _, msg := range messages {
+ msgMap, ok := msg.(map[string]any)
+ if !ok {
+ continue
+ }
+ content, ok := msgMap["content"].([]any)
+ if !ok {
+ continue
+ }
+ for _, block := range content {
+ blockMap, ok := block.(map[string]any)
+ if !ok {
+ continue
+ }
+ if blockType, _ := blockMap["type"].(string); blockType != "tool_use" {
+ continue
+ }
+ if name, ok := blockMap["name"].(string); ok {
+ normalized := normalizeToolNameForClaude(name, toolNameMap)
+ if normalized != "" && normalized != name {
+ blockMap["name"] = normalized
+ }
+ }
+ }
+ }
+ }
+
+ if opts.stripSystemCacheControl {
+ if system, ok := req["system"]; ok {
+ _ = stripCacheControlFromSystemBlocks(system)
+ }
+ }
+
+ if opts.injectMetadata && opts.metadataUserID != "" {
+ metadata, ok := req["metadata"].(map[string]any)
+ if !ok {
+ metadata = map[string]any{}
+ req["metadata"] = metadata
+ }
+ if existing, ok := metadata["user_id"].(string); !ok || existing == "" {
+ metadata["user_id"] = opts.metadataUserID
+ }
+ }
+
+ if _, ok := req["temperature"]; ok {
+ delete(req, "temperature")
+ }
+ if _, ok := req["tool_choice"]; ok {
+ delete(req, "tool_choice")
+ }
+
+ newBody, err := json.Marshal(req)
+ if err != nil {
+ return body, modelID, toolNameMap
+ }
+ return newBody, modelID, toolNameMap
+}
+
+func (s *GatewayService) buildOAuthMetadataUserID(parsed *ParsedRequest, account *Account, fp *Fingerprint) string {
+ if parsed == nil || fp == nil || fp.ClientID == "" {
+ return ""
+ }
+ if parsed.MetadataUserID != "" {
+ return ""
+ }
+ accountUUID := account.GetExtraString("account_uuid")
+ if accountUUID == "" {
+ return ""
+ }
+ sessionHash := s.GenerateSessionHash(parsed)
+ sessionID := uuid.NewString()
+ if sessionHash != "" {
+ seed := fmt.Sprintf("%d::%s", account.ID, sessionHash)
+ sessionID = generateSessionUUID(seed)
+ }
+ return fmt.Sprintf("user_%s_account_%s_session_%s", fp.ClientID, accountUUID, sessionID)
+}
+
+func generateSessionUUID(seed string) string {
+ if seed == "" {
+ return uuid.NewString()
+ }
+ hash := sha256.Sum256([]byte(seed))
+ bytes := hash[:16]
+ bytes[6] = (bytes[6] & 0x0f) | 0x40
+ bytes[8] = (bytes[8] & 0x3f) | 0x80
+ return fmt.Sprintf("%x-%x-%x-%x-%x",
+ bytes[0:4], bytes[4:6], bytes[6:8], bytes[8:10], bytes[10:16])
+}
+
// SelectAccount 选择账号(粘性会话+优先级)
func (s *GatewayService) SelectAccount(ctx context.Context, groupID *int64, sessionHash string) (*Account, error) {
return s.SelectAccountForModel(ctx, groupID, sessionHash, "")
@@ -1906,21 +2200,36 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
body := parsed.Body
reqModel := parsed.Model
reqStream := parsed.Stream
+ originalModel := reqModel
+ var toolNameMap map[string]string
- // 智能注入 Claude Code 系统提示词(仅 OAuth/SetupToken 账号需要)
- // 条件:1) OAuth/SetupToken 账号 2) 不是 Claude Code 客户端 3) 不是 Haiku 模型 4) system 中还没有 Claude Code 提示词
- if account.IsOAuth() &&
- !isClaudeCodeClient(c.GetHeader("User-Agent"), parsed.MetadataUserID) &&
- !strings.Contains(strings.ToLower(reqModel), "haiku") &&
- !systemIncludesClaudeCodePrompt(parsed.System) {
- body = injectClaudeCodePrompt(body, parsed.System)
+ if account.IsOAuth() {
+ // 智能注入 Claude Code 系统提示词(仅 OAuth/SetupToken 账号需要)
+ // 条件:1) OAuth/SetupToken 账号 2) 不是 Claude Code 客户端 3) 不是 Haiku 模型 4) system 中还没有 Claude Code 提示词
+ if !isClaudeCodeClient(c.GetHeader("User-Agent"), parsed.MetadataUserID) &&
+ !strings.Contains(strings.ToLower(reqModel), "haiku") &&
+ !systemIncludesClaudeCodePrompt(parsed.System) {
+ body = injectClaudeCodePrompt(body, parsed.System)
+ }
+
+ normalizeOpts := claudeOAuthNormalizeOptions{stripSystemCacheControl: true}
+ if s.identityService != nil {
+ fp, err := s.identityService.GetOrCreateFingerprint(ctx, account.ID, c.Request.Header)
+ if err == nil && fp != nil {
+ if metadataUserID := s.buildOAuthMetadataUserID(parsed, account, fp); metadataUserID != "" {
+ normalizeOpts.injectMetadata = true
+ normalizeOpts.metadataUserID = metadataUserID
+ }
+ }
+ }
+
+ body, reqModel, toolNameMap = normalizeClaudeOAuthRequestBody(body, reqModel, normalizeOpts)
}
// 强制执行 cache_control 块数量限制(最多 4 个)
body = enforceCacheControlLimit(body)
// 应用模型映射(仅对apikey类型账号)
- originalModel := reqModel
if account.Type == AccountTypeAPIKey {
mappedModel := account.GetMappedModel(reqModel)
if mappedModel != reqModel {
@@ -1948,10 +2257,9 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
retryStart := time.Now()
for attempt := 1; attempt <= maxRetryAttempts; attempt++ {
// 构建上游请求(每次重试需要重新构建,因为请求体需要重新读取)
- upstreamReq, err := s.buildUpstreamRequest(ctx, c, account, body, token, tokenType, reqModel)
+ upstreamReq, err := s.buildUpstreamRequest(ctx, c, account, body, token, tokenType, reqModel, reqStream)
// Capture upstream request body for ops retry of this attempt.
c.Set(OpsUpstreamRequestBodyKey, string(body))
-
if err != nil {
return nil, err
}
@@ -2029,7 +2337,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
// also downgrade tool_use/tool_result blocks to text.
filteredBody := FilterThinkingBlocksForRetry(body)
- retryReq, buildErr := s.buildUpstreamRequest(ctx, c, account, filteredBody, token, tokenType, reqModel)
+ retryReq, buildErr := s.buildUpstreamRequest(ctx, c, account, filteredBody, token, tokenType, reqModel, reqStream)
if buildErr == nil {
retryResp, retryErr := s.httpUpstream.Do(retryReq, proxyURL, account.ID, account.Concurrency)
if retryErr == nil {
@@ -2061,7 +2369,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
if looksLikeToolSignatureError(msg2) && time.Since(retryStart) < maxRetryElapsed {
log.Printf("Account %d: signature retry still failing and looks tool-related, retrying with tool blocks downgraded", account.ID)
filteredBody2 := FilterSignatureSensitiveBlocksForRetry(body)
- retryReq2, buildErr2 := s.buildUpstreamRequest(ctx, c, account, filteredBody2, token, tokenType, reqModel)
+ retryReq2, buildErr2 := s.buildUpstreamRequest(ctx, c, account, filteredBody2, token, tokenType, reqModel, reqStream)
if buildErr2 == nil {
retryResp2, retryErr2 := s.httpUpstream.Do(retryReq2, proxyURL, account.ID, account.Concurrency)
if retryErr2 == nil {
@@ -2278,7 +2586,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
var firstTokenMs *int
var clientDisconnect bool
if reqStream {
- streamResult, err := s.handleStreamingResponse(ctx, resp, c, account, startTime, originalModel, reqModel)
+ streamResult, err := s.handleStreamingResponse(ctx, resp, c, account, startTime, originalModel, reqModel, toolNameMap)
if err != nil {
if err.Error() == "have error in stream" {
return nil, &UpstreamFailoverError{
@@ -2291,7 +2599,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
firstTokenMs = streamResult.firstTokenMs
clientDisconnect = streamResult.clientDisconnect
} else {
- usage, err = s.handleNonStreamingResponse(ctx, resp, c, account, originalModel, reqModel)
+ usage, err = s.handleNonStreamingResponse(ctx, resp, c, account, originalModel, reqModel, toolNameMap)
if err != nil {
return nil, err
}
@@ -2308,7 +2616,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
}, nil
}
-func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Context, account *Account, body []byte, token, tokenType, modelID string) (*http.Request, error) {
+func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Context, account *Account, body []byte, token, tokenType, modelID string, reqStream bool) (*http.Request, error) {
// 确定目标URL
targetURL := claudeAPIURL
if account.Type == AccountTypeAPIKey {
@@ -2377,6 +2685,9 @@ func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Contex
if req.Header.Get("anthropic-version") == "" {
req.Header.Set("anthropic-version", "2023-06-01")
}
+ if tokenType == "oauth" {
+ applyClaudeOAuthHeaderDefaults(req, reqStream)
+ }
// 处理anthropic-beta header(OAuth账号需要特殊处理)
if tokenType == "oauth" {
@@ -2459,6 +2770,26 @@ func defaultAPIKeyBetaHeader(body []byte) string {
return claude.APIKeyBetaHeader
}
+func applyClaudeOAuthHeaderDefaults(req *http.Request, isStream bool) {
+ if req == nil {
+ return
+ }
+ if req.Header.Get("accept") == "" {
+ req.Header.Set("accept", "application/json")
+ }
+ for key, value := range claude.DefaultHeaders {
+ if value == "" {
+ continue
+ }
+ if req.Header.Get(key) == "" {
+ req.Header.Set(key, value)
+ }
+ }
+ if isStream && req.Header.Get("x-stainless-helper-method") == "" {
+ req.Header.Set("x-stainless-helper-method", "stream")
+ }
+}
+
func truncateForLog(b []byte, maxBytes int) string {
if maxBytes <= 0 {
maxBytes = 2048
@@ -2739,7 +3070,7 @@ type streamingResult struct {
clientDisconnect bool // 客户端是否在流式传输过程中断开
}
-func (s *GatewayService) handleStreamingResponse(ctx context.Context, resp *http.Response, c *gin.Context, account *Account, startTime time.Time, originalModel, mappedModel string) (*streamingResult, error) {
+func (s *GatewayService) handleStreamingResponse(ctx context.Context, resp *http.Response, c *gin.Context, account *Account, startTime time.Time, originalModel, mappedModel string, toolNameMap map[string]string) (*streamingResult, error) {
// 更新5h窗口状态
s.rateLimitService.UpdateSessionWindow(ctx, account, resp.Header)
@@ -2832,6 +3163,7 @@ func (s *GatewayService) handleStreamingResponse(ctx context.Context, resp *http
}
needModelReplace := originalModel != mappedModel
+ rewriteTools := account.IsOAuth()
clientDisconnected := false // 客户端断开标志,断开后继续读取上游以获取完整usage
for {
@@ -2873,11 +3205,14 @@ func (s *GatewayService) handleStreamingResponse(ctx context.Context, resp *http
// Extract data from SSE line (supports both "data: " and "data:" formats)
var data string
if sseDataRe.MatchString(line) {
- data = sseDataRe.ReplaceAllString(line, "")
// 如果有模型映射,替换响应中的model字段
if needModelReplace {
line = s.replaceModelInSSELine(line, mappedModel, originalModel)
}
+ if rewriteTools {
+ line = s.replaceToolNamesInSSELine(line, toolNameMap)
+ }
+ data = sseDataRe.ReplaceAllString(line, "")
}
// 写入客户端(统一处理 data 行和非 data 行)
@@ -2960,6 +3295,61 @@ func (s *GatewayService) replaceModelInSSELine(line, fromModel, toModel string)
return "data: " + string(newData)
}
+func rewriteToolNamesInValue(value any, toolNameMap map[string]string) bool {
+ switch v := value.(type) {
+ case map[string]any:
+ changed := false
+ if blockType, _ := v["type"].(string); blockType == "tool_use" {
+ if name, ok := v["name"].(string); ok {
+ mapped := normalizeToolNameForOpenCode(name, toolNameMap)
+ if mapped != name {
+ v["name"] = mapped
+ changed = true
+ }
+ }
+ }
+ for _, item := range v {
+ if rewriteToolNamesInValue(item, toolNameMap) {
+ changed = true
+ }
+ }
+ return changed
+ case []any:
+ changed := false
+ for _, item := range v {
+ if rewriteToolNamesInValue(item, toolNameMap) {
+ changed = true
+ }
+ }
+ return changed
+ default:
+ return false
+ }
+}
+
+func (s *GatewayService) replaceToolNamesInSSELine(line string, toolNameMap map[string]string) string {
+ if !sseDataRe.MatchString(line) {
+ return line
+ }
+ data := sseDataRe.ReplaceAllString(line, "")
+ if data == "" || data == "[DONE]" {
+ return line
+ }
+
+ var event map[string]any
+ if err := json.Unmarshal([]byte(data), &event); err != nil {
+ return line
+ }
+ if !rewriteToolNamesInValue(event, toolNameMap) {
+ return line
+ }
+ newData, err := json.Marshal(event)
+ if err != nil {
+ return line
+ }
+ return "data: " + string(newData)
+}
+
func (s *GatewayService) parseSSEUsage(data string, usage *ClaudeUsage) {
// 解析message_start获取input tokens(标准Claude API格式)
var msgStart struct {
@@ -3001,7 +3391,7 @@ func (s *GatewayService) parseSSEUsage(data string, usage *ClaudeUsage) {
}
}
-func (s *GatewayService) handleNonStreamingResponse(ctx context.Context, resp *http.Response, c *gin.Context, account *Account, originalModel, mappedModel string) (*ClaudeUsage, error) {
+func (s *GatewayService) handleNonStreamingResponse(ctx context.Context, resp *http.Response, c *gin.Context, account *Account, originalModel, mappedModel string, toolNameMap map[string]string) (*ClaudeUsage, error) {
// 更新5h窗口状态
s.rateLimitService.UpdateSessionWindow(ctx, account, resp.Header)
@@ -3022,6 +3412,9 @@ func (s *GatewayService) handleNonStreamingResponse(ctx context.Context, resp *h
if originalModel != mappedModel {
body = s.replaceModelInResponseBody(body, mappedModel, originalModel)
}
+ if account.IsOAuth() {
+ body = s.replaceToolNamesInResponseBody(body, toolNameMap)
+ }
responseheaders.WriteFilteredHeaders(c.Writer.Header(), resp.Header, s.cfg.Security.ResponseHeaders)
@@ -3059,6 +3452,24 @@ func (s *GatewayService) replaceModelInResponseBody(body []byte, fromModel, toMo
return newBody
}
+func (s *GatewayService) replaceToolNamesInResponseBody(body []byte, toolNameMap map[string]string) []byte {
+ if len(body) == 0 {
+ return body
+ }
+ var resp map[string]any
+ if err := json.Unmarshal(body, &resp); err != nil {
+ return body
+ }
+ if !rewriteToolNamesInValue(resp, toolNameMap) {
+ return body
+ }
+ newBody, err := json.Marshal(resp)
+ if err != nil {
+ return body
+ }
+ return newBody
+}
+
// RecordUsageInput 记录使用量的输入参数
type RecordUsageInput struct {
Result *ForwardResult
@@ -3224,6 +3635,11 @@ func (s *GatewayService) ForwardCountTokens(ctx context.Context, c *gin.Context,
body := parsed.Body
reqModel := parsed.Model
+ if account.IsOAuth() {
+ normalizeOpts := claudeOAuthNormalizeOptions{stripSystemCacheControl: true}
+ body, reqModel, _ = normalizeClaudeOAuthRequestBody(body, reqModel, normalizeOpts)
+ }
+
// Antigravity 账户不支持 count_tokens 转发,直接返回空值
if account.Platform == PlatformAntigravity {
c.JSON(http.StatusOK, gin.H{"input_tokens": 0})
@@ -3412,6 +3828,9 @@ func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Con
if req.Header.Get("anthropic-version") == "" {
req.Header.Set("anthropic-version", "2023-06-01")
}
+ if tokenType == "oauth" {
+ applyClaudeOAuthHeaderDefaults(req, false)
+ }
// OAuth 账号:处理 anthropic-beta header
if tokenType == "oauth" {
diff --git a/backend/internal/service/identity_service.go b/backend/internal/service/identity_service.go
index 1ffa8057..4ab1ab96 100644
--- a/backend/internal/service/identity_service.go
+++ b/backend/internal/service/identity_service.go
@@ -24,13 +24,13 @@ var (
// 默认指纹值(当客户端未提供时使用)
var defaultFingerprint = Fingerprint{
- UserAgent: "claude-cli/2.0.62 (external, cli)",
+ UserAgent: "claude-cli/2.1.2 (external, cli)",
StainlessLang: "js",
- StainlessPackageVersion: "0.52.0",
+ StainlessPackageVersion: "0.70.0",
StainlessOS: "Linux",
StainlessArch: "x64",
StainlessRuntime: "node",
- StainlessRuntimeVersion: "v22.14.0",
+ StainlessRuntimeVersion: "v24.3.0",
}
// Fingerprint represents account fingerprint data
@@ -230,7 +230,7 @@ func generateUUIDFromSeed(seed string) string {
}
// parseUserAgentVersion 解析user-agent版本号
-// 例如:claude-cli/2.0.62 -> (2, 0, 62)
+// 例如:claude-cli/2.1.2 -> (2, 1, 2)
func parseUserAgentVersion(ua string) (major, minor, patch int, ok bool) {
// 匹配 xxx/x.y.z 格式
matches := userAgentVersionRegex.FindStringSubmatch(ua)
From b8c48fb4775785e4bb607585d2f77fde03444fcc Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Thu, 15 Jan 2026 19:17:07 +0800
Subject: [PATCH 08/99] =?UTF-8?q?fix(=E7=BD=91=E5=85=B3):=20=E5=8C=BA?=
=?UTF-8?q?=E5=88=86=20Claude=20Code=20OAuth=20=E9=80=82=E9=85=8D?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
backend/internal/handler/gateway_handler.go | 3 +
backend/internal/pkg/claude/constants.go | 4 +
backend/internal/service/gateway_service.go | 110 +++++++++++++++-----
3 files changed, 90 insertions(+), 27 deletions(-)
diff --git a/backend/internal/handler/gateway_handler.go b/backend/internal/handler/gateway_handler.go
index b60618a8..91d590bf 100644
--- a/backend/internal/handler/gateway_handler.go
+++ b/backend/internal/handler/gateway_handler.go
@@ -707,6 +707,9 @@ func (h *GatewayHandler) CountTokens(c *gin.Context) {
return
}
+ // 检查是否为 Claude Code 客户端,设置到 context 中
+ SetClaudeCodeClientContext(c, body)
+
setOpsRequestContext(c, "", false, body)
parsedReq, err := service.ParseGatewayRequest(body)
diff --git a/backend/internal/pkg/claude/constants.go b/backend/internal/pkg/claude/constants.go
index 15144881..f60412c2 100644
--- a/backend/internal/pkg/claude/constants.go
+++ b/backend/internal/pkg/claude/constants.go
@@ -9,11 +9,15 @@ const (
BetaClaudeCode = "claude-code-20250219"
BetaInterleavedThinking = "interleaved-thinking-2025-05-14"
BetaFineGrainedToolStreaming = "fine-grained-tool-streaming-2025-05-14"
+ BetaTokenCounting = "token-counting-2024-11-01"
)
// DefaultBetaHeader Claude Code 客户端默认的 anthropic-beta header
const DefaultBetaHeader = BetaClaudeCode + "," + BetaOAuth + "," + BetaInterleavedThinking + "," + BetaFineGrainedToolStreaming
+// CountTokensBetaHeader count_tokens 请求使用的 anthropic-beta header
+const CountTokensBetaHeader = BetaClaudeCode + "," + BetaOAuth + "," + BetaInterleavedThinking + "," + BetaTokenCounting
+
// HaikuBetaHeader Haiku 模型使用的 anthropic-beta header(不需要 claude-code beta)
const HaikuBetaHeader = BetaOAuth + "," + BetaInterleavedThinking
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index 899a0fc5..93dc59dc 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -65,6 +65,8 @@ var (
toolPrefixRe = regexp.MustCompile(`(?i)^(?:oc_|mcp_)`)
toolNameBoundaryRe = regexp.MustCompile(`[^a-zA-Z0-9]+`)
toolNameCamelRe = regexp.MustCompile(`([a-z0-9])([A-Z])`)
+ toolNameFieldRe = regexp.MustCompile(`"name"\s*:\s*"([^"]+)"`)
+ modelFieldRe = regexp.MustCompile(`"model"\s*:\s*"([^"]+)"`)
claudeToolNameOverrides = map[string]string{
"bash": "Bash",
@@ -1941,6 +1943,16 @@ func isClaudeCodeClient(userAgent string, metadataUserID string) bool {
return claudeCliUserAgentRe.MatchString(userAgent)
}
+func isClaudeCodeRequest(ctx context.Context, c *gin.Context, parsed *ParsedRequest) bool {
+ if IsClaudeCodeClient(ctx) {
+ return true
+ }
+ if parsed == nil || c == nil {
+ return false
+ }
+ return isClaudeCodeClient(c.GetHeader("User-Agent"), parsed.MetadataUserID)
+}
+
// systemIncludesClaudeCodePrompt 检查 system 中是否已包含 Claude Code 提示词
// 使用前缀匹配支持多种变体(标准版、Agent SDK 版等)
func systemIncludesClaudeCodePrompt(system any) bool {
@@ -2203,11 +2215,13 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
originalModel := reqModel
var toolNameMap map[string]string
- if account.IsOAuth() {
+ isClaudeCode := isClaudeCodeRequest(ctx, c, parsed)
+ shouldMimicClaudeCode := account.IsOAuth() && !isClaudeCode
+
+ if shouldMimicClaudeCode {
// 智能注入 Claude Code 系统提示词(仅 OAuth/SetupToken 账号需要)
// 条件:1) OAuth/SetupToken 账号 2) 不是 Claude Code 客户端 3) 不是 Haiku 模型 4) system 中还没有 Claude Code 提示词
- if !isClaudeCodeClient(c.GetHeader("User-Agent"), parsed.MetadataUserID) &&
- !strings.Contains(strings.ToLower(reqModel), "haiku") &&
+ if !strings.Contains(strings.ToLower(reqModel), "haiku") &&
!systemIncludesClaudeCodePrompt(parsed.System) {
body = injectClaudeCodePrompt(body, parsed.System)
}
@@ -2257,7 +2271,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
retryStart := time.Now()
for attempt := 1; attempt <= maxRetryAttempts; attempt++ {
// 构建上游请求(每次重试需要重新构建,因为请求体需要重新读取)
- upstreamReq, err := s.buildUpstreamRequest(ctx, c, account, body, token, tokenType, reqModel, reqStream)
+ upstreamReq, err := s.buildUpstreamRequest(ctx, c, account, body, token, tokenType, reqModel, reqStream, shouldMimicClaudeCode)
// Capture upstream request body for ops retry of this attempt.
c.Set(OpsUpstreamRequestBodyKey, string(body))
if err != nil {
@@ -2337,7 +2351,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
// also downgrade tool_use/tool_result blocks to text.
filteredBody := FilterThinkingBlocksForRetry(body)
- retryReq, buildErr := s.buildUpstreamRequest(ctx, c, account, filteredBody, token, tokenType, reqModel, reqStream)
+ retryReq, buildErr := s.buildUpstreamRequest(ctx, c, account, filteredBody, token, tokenType, reqModel, reqStream, shouldMimicClaudeCode)
if buildErr == nil {
retryResp, retryErr := s.httpUpstream.Do(retryReq, proxyURL, account.ID, account.Concurrency)
if retryErr == nil {
@@ -2369,7 +2383,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
if looksLikeToolSignatureError(msg2) && time.Since(retryStart) < maxRetryElapsed {
log.Printf("Account %d: signature retry still failing and looks tool-related, retrying with tool blocks downgraded", account.ID)
filteredBody2 := FilterSignatureSensitiveBlocksForRetry(body)
- retryReq2, buildErr2 := s.buildUpstreamRequest(ctx, c, account, filteredBody2, token, tokenType, reqModel, reqStream)
+ retryReq2, buildErr2 := s.buildUpstreamRequest(ctx, c, account, filteredBody2, token, tokenType, reqModel, reqStream, shouldMimicClaudeCode)
if buildErr2 == nil {
retryResp2, retryErr2 := s.httpUpstream.Do(retryReq2, proxyURL, account.ID, account.Concurrency)
if retryErr2 == nil {
@@ -2586,7 +2600,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
var firstTokenMs *int
var clientDisconnect bool
if reqStream {
- streamResult, err := s.handleStreamingResponse(ctx, resp, c, account, startTime, originalModel, reqModel, toolNameMap)
+ streamResult, err := s.handleStreamingResponse(ctx, resp, c, account, startTime, originalModel, reqModel, toolNameMap, shouldMimicClaudeCode)
if err != nil {
if err.Error() == "have error in stream" {
return nil, &UpstreamFailoverError{
@@ -2599,7 +2613,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
firstTokenMs = streamResult.firstTokenMs
clientDisconnect = streamResult.clientDisconnect
} else {
- usage, err = s.handleNonStreamingResponse(ctx, resp, c, account, originalModel, reqModel, toolNameMap)
+ usage, err = s.handleNonStreamingResponse(ctx, resp, c, account, originalModel, reqModel, toolNameMap, shouldMimicClaudeCode)
if err != nil {
return nil, err
}
@@ -2616,7 +2630,7 @@ func (s *GatewayService) Forward(ctx context.Context, c *gin.Context, account *A
}, nil
}
-func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Context, account *Account, body []byte, token, tokenType, modelID string, reqStream bool) (*http.Request, error) {
+func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Context, account *Account, body []byte, token, tokenType, modelID string, reqStream bool, mimicClaudeCode bool) (*http.Request, error) {
// 确定目标URL
targetURL := claudeAPIURL
if account.Type == AccountTypeAPIKey {
@@ -2632,7 +2646,7 @@ func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Contex
// OAuth账号:应用统一指纹
var fingerprint *Fingerprint
- if account.IsOAuth() && s.identityService != nil {
+ if account.IsOAuth() && mimicClaudeCode && s.identityService != nil {
// 1. 获取或创建指纹(包含随机生成的ClientID)
fp, err := s.identityService.GetOrCreateFingerprint(ctx, account.ID, c.Request.Header)
if err != nil {
@@ -2685,12 +2699,12 @@ func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Contex
if req.Header.Get("anthropic-version") == "" {
req.Header.Set("anthropic-version", "2023-06-01")
}
- if tokenType == "oauth" {
+ if tokenType == "oauth" && mimicClaudeCode {
applyClaudeOAuthHeaderDefaults(req, reqStream)
}
// 处理anthropic-beta header(OAuth账号需要特殊处理)
- if tokenType == "oauth" {
+ if tokenType == "oauth" && mimicClaudeCode {
req.Header.Set("anthropic-beta", s.getBetaHeader(modelID, c.GetHeader("anthropic-beta")))
} else if s.cfg != nil && s.cfg.Gateway.InjectBetaForAPIKey && req.Header.Get("anthropic-beta") == "" {
// API-key:仅在请求显式使用 beta 特性且客户端未提供时,按需补齐(默认关闭)
@@ -3070,7 +3084,7 @@ type streamingResult struct {
clientDisconnect bool // 客户端是否在流式传输过程中断开
}
-func (s *GatewayService) handleStreamingResponse(ctx context.Context, resp *http.Response, c *gin.Context, account *Account, startTime time.Time, originalModel, mappedModel string, toolNameMap map[string]string) (*streamingResult, error) {
+func (s *GatewayService) handleStreamingResponse(ctx context.Context, resp *http.Response, c *gin.Context, account *Account, startTime time.Time, originalModel, mappedModel string, toolNameMap map[string]string, mimicClaudeCode bool) (*streamingResult, error) {
// 更新5h窗口状态
s.rateLimitService.UpdateSessionWindow(ctx, account, resp.Header)
@@ -3163,7 +3177,7 @@ func (s *GatewayService) handleStreamingResponse(ctx context.Context, resp *http
}
needModelReplace := originalModel != mappedModel
- rewriteTools := account.IsOAuth()
+ rewriteTools := mimicClaudeCode
clientDisconnected := false // 客户端断开标志,断开后继续读取上游以获取完整usage
for {
@@ -3327,6 +3341,37 @@ func rewriteToolNamesInValue(value any, toolNameMap map[string]string) bool {
}
}
+func replaceToolNamesInText(text string, toolNameMap map[string]string) string {
+ if text == "" {
+ return text
+ }
+ output := toolNameFieldRe.ReplaceAllStringFunc(text, func(match string) string {
+ submatches := toolNameFieldRe.FindStringSubmatch(match)
+ if len(submatches) < 2 {
+ return match
+ }
+ name := submatches[1]
+ mapped := normalizeToolNameForOpenCode(name, toolNameMap)
+ if mapped == name {
+ return match
+ }
+ return strings.Replace(match, name, mapped, 1)
+ })
+ output = modelFieldRe.ReplaceAllStringFunc(output, func(match string) string {
+ submatches := modelFieldRe.FindStringSubmatch(match)
+ if len(submatches) < 2 {
+ return match
+ }
+ model := submatches[1]
+ mapped := claude.DenormalizeModelID(model)
+ if mapped == model {
+ return match
+ }
+ return strings.Replace(match, model, mapped, 1)
+ })
+ return output
+}
+
func (s *GatewayService) replaceToolNamesInSSELine(line string, toolNameMap map[string]string) string {
if !sseDataRe.MatchString(line) {
return line
@@ -3338,7 +3383,11 @@ func (s *GatewayService) replaceToolNamesInSSELine(line string, toolNameMap map[
var event map[string]any
if err := json.Unmarshal([]byte(data), &event); err != nil {
- return line
+ replaced := replaceToolNamesInText(data, toolNameMap)
+ if replaced == data {
+ return line
+ }
+ return "data: " + replaced
}
if !rewriteToolNamesInValue(event, toolNameMap) {
return line
@@ -3391,7 +3440,7 @@ func (s *GatewayService) parseSSEUsage(data string, usage *ClaudeUsage) {
}
}
-func (s *GatewayService) handleNonStreamingResponse(ctx context.Context, resp *http.Response, c *gin.Context, account *Account, originalModel, mappedModel string, toolNameMap map[string]string) (*ClaudeUsage, error) {
+func (s *GatewayService) handleNonStreamingResponse(ctx context.Context, resp *http.Response, c *gin.Context, account *Account, originalModel, mappedModel string, toolNameMap map[string]string, mimicClaudeCode bool) (*ClaudeUsage, error) {
// 更新5h窗口状态
s.rateLimitService.UpdateSessionWindow(ctx, account, resp.Header)
@@ -3412,7 +3461,7 @@ func (s *GatewayService) handleNonStreamingResponse(ctx context.Context, resp *h
if originalModel != mappedModel {
body = s.replaceModelInResponseBody(body, mappedModel, originalModel)
}
- if account.IsOAuth() {
+ if mimicClaudeCode {
body = s.replaceToolNamesInResponseBody(body, toolNameMap)
}
@@ -3458,7 +3507,11 @@ func (s *GatewayService) replaceToolNamesInResponseBody(body []byte, toolNameMap
}
var resp map[string]any
if err := json.Unmarshal(body, &resp); err != nil {
- return body
+ replaced := replaceToolNamesInText(string(body), toolNameMap)
+ if replaced == string(body) {
+ return body
+ }
+ return []byte(replaced)
}
if !rewriteToolNamesInValue(resp, toolNameMap) {
return body
@@ -3635,7 +3688,10 @@ func (s *GatewayService) ForwardCountTokens(ctx context.Context, c *gin.Context,
body := parsed.Body
reqModel := parsed.Model
- if account.IsOAuth() {
+ isClaudeCode := isClaudeCodeRequest(ctx, c, parsed)
+ shouldMimicClaudeCode := account.IsOAuth() && !isClaudeCode
+
+ if shouldMimicClaudeCode {
normalizeOpts := claudeOAuthNormalizeOptions{stripSystemCacheControl: true}
body, reqModel, _ = normalizeClaudeOAuthRequestBody(body, reqModel, normalizeOpts)
}
@@ -3666,7 +3722,7 @@ func (s *GatewayService) ForwardCountTokens(ctx context.Context, c *gin.Context,
}
// 构建上游请求
- upstreamReq, err := s.buildCountTokensRequest(ctx, c, account, body, token, tokenType, reqModel)
+ upstreamReq, err := s.buildCountTokensRequest(ctx, c, account, body, token, tokenType, reqModel, shouldMimicClaudeCode)
if err != nil {
s.countTokensError(c, http.StatusInternalServerError, "api_error", "Failed to build request")
return err
@@ -3699,7 +3755,7 @@ func (s *GatewayService) ForwardCountTokens(ctx context.Context, c *gin.Context,
log.Printf("Account %d: detected thinking block signature error on count_tokens, retrying with filtered thinking blocks", account.ID)
filteredBody := FilterThinkingBlocksForRetry(body)
- retryReq, buildErr := s.buildCountTokensRequest(ctx, c, account, filteredBody, token, tokenType, reqModel)
+ retryReq, buildErr := s.buildCountTokensRequest(ctx, c, account, filteredBody, token, tokenType, reqModel, shouldMimicClaudeCode)
if buildErr == nil {
retryResp, retryErr := s.httpUpstream.Do(retryReq, proxyURL, account.ID, account.Concurrency)
if retryErr == nil {
@@ -3764,7 +3820,7 @@ func (s *GatewayService) ForwardCountTokens(ctx context.Context, c *gin.Context,
}
// buildCountTokensRequest 构建 count_tokens 上游请求
-func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Context, account *Account, body []byte, token, tokenType, modelID string) (*http.Request, error) {
+func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Context, account *Account, body []byte, token, tokenType, modelID string, mimicClaudeCode bool) (*http.Request, error) {
// 确定目标 URL
targetURL := claudeAPICountTokensURL
if account.Type == AccountTypeAPIKey {
@@ -3779,7 +3835,7 @@ func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Con
}
// OAuth 账号:应用统一指纹和重写 userID
- if account.IsOAuth() && s.identityService != nil {
+ if account.IsOAuth() && mimicClaudeCode && s.identityService != nil {
fp, err := s.identityService.GetOrCreateFingerprint(ctx, account.ID, c.Request.Header)
if err == nil {
accountUUID := account.GetExtraString("account_uuid")
@@ -3814,7 +3870,7 @@ func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Con
}
// OAuth 账号:应用指纹到请求头
- if account.IsOAuth() && s.identityService != nil {
+ if account.IsOAuth() && mimicClaudeCode && s.identityService != nil {
fp, _ := s.identityService.GetOrCreateFingerprint(ctx, account.ID, c.Request.Header)
if fp != nil {
s.identityService.ApplyFingerprint(req, fp)
@@ -3828,13 +3884,13 @@ func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Con
if req.Header.Get("anthropic-version") == "" {
req.Header.Set("anthropic-version", "2023-06-01")
}
- if tokenType == "oauth" {
+ if tokenType == "oauth" && mimicClaudeCode {
applyClaudeOAuthHeaderDefaults(req, false)
}
// OAuth 账号:处理 anthropic-beta header
- if tokenType == "oauth" {
- req.Header.Set("anthropic-beta", s.getBetaHeader(modelID, c.GetHeader("anthropic-beta")))
+ if tokenType == "oauth" && mimicClaudeCode {
+ req.Header.Set("anthropic-beta", claude.CountTokensBetaHeader)
} else if s.cfg != nil && s.cfg.Gateway.InjectBetaForAPIKey && req.Header.Get("anthropic-beta") == "" {
// API-key:与 messages 同步的按需 beta 注入(默认关闭)
if requestNeedsBetaFeatures(body) {
From 0962ba43c0fcc517225d716b056cc3dd3d71125f Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Fri, 16 Jan 2026 00:41:29 +0800
Subject: [PATCH 09/99] =?UTF-8?q?fix(=E7=BD=91=E5=85=B3):=20=E8=A1=A5?=
=?UTF-8?q?=E9=BD=90=E9=9D=9E=20Claude=20Code=20OAuth=20=E5=85=BC=E5=AE=B9?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
backend/internal/pkg/claude/constants.go | 6 +
backend/internal/service/account.go | 16 ++
backend/internal/service/gateway_service.go | 239 +++++++++++++++++---
3 files changed, 232 insertions(+), 29 deletions(-)
diff --git a/backend/internal/pkg/claude/constants.go b/backend/internal/pkg/claude/constants.go
index f60412c2..0c6e9b4c 100644
--- a/backend/internal/pkg/claude/constants.go
+++ b/backend/internal/pkg/claude/constants.go
@@ -15,6 +15,12 @@ const (
// DefaultBetaHeader Claude Code 客户端默认的 anthropic-beta header
const DefaultBetaHeader = BetaClaudeCode + "," + BetaOAuth + "," + BetaInterleavedThinking + "," + BetaFineGrainedToolStreaming
+// MessageBetaHeaderNoTools /v1/messages 在无工具时的 beta header
+const MessageBetaHeaderNoTools = BetaOAuth + "," + BetaInterleavedThinking
+
+// MessageBetaHeaderWithTools /v1/messages 在有工具时的 beta header
+const MessageBetaHeaderWithTools = BetaClaudeCode + "," + BetaOAuth + "," + BetaInterleavedThinking
+
// CountTokensBetaHeader count_tokens 请求使用的 anthropic-beta header
const CountTokensBetaHeader = BetaClaudeCode + "," + BetaOAuth + "," + BetaInterleavedThinking + "," + BetaTokenCounting
diff --git a/backend/internal/service/account.go b/backend/internal/service/account.go
index 0d7a9cf9..9f965682 100644
--- a/backend/internal/service/account.go
+++ b/backend/internal/service/account.go
@@ -381,6 +381,22 @@ func (a *Account) GetExtraString(key string) string {
return ""
}
+func (a *Account) GetClaudeUserID() string {
+ if v := strings.TrimSpace(a.GetExtraString("claude_user_id")); v != "" {
+ return v
+ }
+ if v := strings.TrimSpace(a.GetExtraString("anthropic_user_id")); v != "" {
+ return v
+ }
+ if v := strings.TrimSpace(a.GetCredential("claude_user_id")); v != "" {
+ return v
+ }
+ if v := strings.TrimSpace(a.GetCredential("anthropic_user_id")); v != "" {
+ return v
+ }
+ return ""
+}
+
func (a *Account) IsCustomErrorCodesEnabled() bool {
if a.Type != AccountTypeAPIKey || a.Credentials == nil {
return false
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index 93dc59dc..71ad0d00 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -67,6 +67,9 @@ var (
toolNameCamelRe = regexp.MustCompile(`([a-z0-9])([A-Z])`)
toolNameFieldRe = regexp.MustCompile(`"name"\s*:\s*"([^"]+)"`)
modelFieldRe = regexp.MustCompile(`"model"\s*:\s*"([^"]+)"`)
+ toolDescAbsPathRe = regexp.MustCompile(`/\/?(?:home|Users|tmp|var|opt|usr|etc)\/[^\s,\)"'\]]+`)
+ toolDescWinPathRe = regexp.MustCompile(`(?i)[A-Z]:\\[^\s,\)"'\]]+`)
+ opencodeTextRe = regexp.MustCompile(`(?i)opencode`)
claudeToolNameOverrides = map[string]string{
"bash": "Bash",
@@ -470,6 +473,22 @@ func normalizeToolNameForClaude(name string, cache map[string]string) string {
}
func normalizeToolNameForOpenCode(name string, cache map[string]string) string {
+ if name == "" {
+ return name
+ }
+ stripped := stripToolPrefix(name)
+ if cache != nil {
+ if mapped, ok := cache[stripped]; ok {
+ return mapped
+ }
+ }
+ if mapped, ok := openCodeToolOverrides[stripped]; ok {
+ return mapped
+ }
+ return toSnakeCase(stripped)
+}
+
+func normalizeParamNameForOpenCode(name string, cache map[string]string) string {
if name == "" {
return name
}
@@ -478,10 +497,63 @@ func normalizeToolNameForOpenCode(name string, cache map[string]string) string {
return mapped
}
}
- if mapped, ok := openCodeToolOverrides[name]; ok {
- return mapped
+ return name
+}
+
+func sanitizeOpenCodeText(text string) string {
+ if text == "" {
+ return text
+ }
+ text = strings.ReplaceAll(text, "OpenCode", "Claude Code")
+ text = opencodeTextRe.ReplaceAllString(text, "Claude")
+ return text
+}
+
+func sanitizeToolDescription(description string) string {
+ if description == "" {
+ return description
+ }
+ description = toolDescAbsPathRe.ReplaceAllString(description, "[path]")
+ description = toolDescWinPathRe.ReplaceAllString(description, "[path]")
+ return sanitizeOpenCodeText(description)
+}
+
+func normalizeToolInputSchema(inputSchema any, cache map[string]string) {
+ schema, ok := inputSchema.(map[string]any)
+ if !ok {
+ return
+ }
+ properties, ok := schema["properties"].(map[string]any)
+ if !ok {
+ return
+ }
+
+ newProperties := make(map[string]any, len(properties))
+ for key, value := range properties {
+ snakeKey := toSnakeCase(key)
+ newProperties[snakeKey] = value
+ if snakeKey != key && cache != nil {
+ cache[snakeKey] = key
+ }
+ }
+ schema["properties"] = newProperties
+
+ if required, ok := schema["required"].([]any); ok {
+ newRequired := make([]any, 0, len(required))
+ for _, item := range required {
+ name, ok := item.(string)
+ if !ok {
+ newRequired = append(newRequired, item)
+ continue
+ }
+ snakeName := toSnakeCase(name)
+ newRequired = append(newRequired, snakeName)
+ if snakeName != name && cache != nil {
+ cache[snakeName] = name
+ }
+ }
+ schema["required"] = newRequired
}
- return toSnakeCase(name)
}
func stripCacheControlFromSystemBlocks(system any) bool {
@@ -498,9 +570,6 @@ func stripCacheControlFromSystemBlocks(system any) bool {
if _, exists := block["cache_control"]; !exists {
continue
}
- if text, ok := block["text"].(string); ok && text == claudeCodeSystemPrompt {
- continue
- }
delete(block, "cache_control")
changed = true
}
@@ -518,6 +587,34 @@ func normalizeClaudeOAuthRequestBody(body []byte, modelID string, opts claudeOAu
toolNameMap := make(map[string]string)
+ if system, ok := req["system"]; ok {
+ switch v := system.(type) {
+ case string:
+ sanitized := sanitizeOpenCodeText(v)
+ if sanitized != v {
+ req["system"] = sanitized
+ }
+ case []any:
+ for _, item := range v {
+ block, ok := item.(map[string]any)
+ if !ok {
+ continue
+ }
+ if blockType, _ := block["type"].(string); blockType != "text" {
+ continue
+ }
+ text, ok := block["text"].(string)
+ if !ok || text == "" {
+ continue
+ }
+ sanitized := sanitizeOpenCodeText(text)
+ if sanitized != text {
+ block["text"] = sanitized
+ }
+ }
+ }
+ }
+
if rawModel, ok := req["model"].(string); ok {
normalized := claude.NormalizeModelID(rawModel)
if normalized != rawModel {
@@ -540,6 +637,15 @@ func normalizeClaudeOAuthRequestBody(body []byte, modelID string, opts claudeOAu
toolMap["name"] = normalized
}
}
+ if desc, ok := toolMap["description"].(string); ok {
+ sanitized := sanitizeToolDescription(desc)
+ if sanitized != desc {
+ toolMap["description"] = sanitized
+ }
+ }
+ if schema, ok := toolMap["input_schema"]; ok {
+ normalizeToolInputSchema(schema, toolNameMap)
+ }
tools[idx] = toolMap
}
req["tools"] = tools
@@ -551,13 +657,15 @@ func normalizeClaudeOAuthRequestBody(body []byte, modelID string, opts claudeOAu
normalized = name
}
if toolMap, ok := value.(map[string]any); ok {
- if toolName, ok := toolMap["name"].(string); ok {
- mappedName := normalizeToolNameForClaude(toolName, toolNameMap)
- if mappedName != "" && mappedName != toolName {
- toolMap["name"] = mappedName
+ toolMap["name"] = normalized
+ if desc, ok := toolMap["description"].(string); ok {
+ sanitized := sanitizeToolDescription(desc)
+ if sanitized != desc {
+ toolMap["description"] = sanitized
}
- } else if normalized != name {
- toolMap["name"] = normalized
+ }
+ if schema, ok := toolMap["input_schema"]; ok {
+ normalizeToolInputSchema(schema, toolNameMap)
}
normalizedTools[normalized] = toolMap
continue
@@ -630,7 +738,7 @@ func normalizeClaudeOAuthRequestBody(body []byte, modelID string, opts claudeOAu
}
func (s *GatewayService) buildOAuthMetadataUserID(parsed *ParsedRequest, account *Account, fp *Fingerprint) string {
- if parsed == nil || fp == nil || fp.ClientID == "" {
+ if parsed == nil || account == nil {
return ""
}
if parsed.MetadataUserID != "" {
@@ -640,13 +748,22 @@ func (s *GatewayService) buildOAuthMetadataUserID(parsed *ParsedRequest, account
if accountUUID == "" {
return ""
}
+
+ userID := strings.TrimSpace(account.GetClaudeUserID())
+ if userID == "" && fp != nil {
+ userID = fp.ClientID
+ }
+ if userID == "" {
+ return ""
+ }
+
sessionHash := s.GenerateSessionHash(parsed)
sessionID := uuid.NewString()
if sessionHash != "" {
seed := fmt.Sprintf("%d::%s", account.ID, sessionHash)
sessionID = generateSessionUUID(seed)
}
- return fmt.Sprintf("user_%s_account_%s_session_%s", fp.ClientID, accountUUID, sessionID)
+ return fmt.Sprintf("user_%s_account_%s_session_%s", userID, accountUUID, sessionID)
}
func generateSessionUUID(seed string) string {
@@ -2705,7 +2822,11 @@ func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Contex
// 处理anthropic-beta header(OAuth账号需要特殊处理)
if tokenType == "oauth" && mimicClaudeCode {
- req.Header.Set("anthropic-beta", s.getBetaHeader(modelID, c.GetHeader("anthropic-beta")))
+ if requestHasTools(body) {
+ req.Header.Set("anthropic-beta", claude.MessageBetaHeaderWithTools)
+ } else {
+ req.Header.Set("anthropic-beta", claude.MessageBetaHeaderNoTools)
+ }
} else if s.cfg != nil && s.cfg.Gateway.InjectBetaForAPIKey && req.Header.Get("anthropic-beta") == "" {
// API-key:仅在请求显式使用 beta 特性且客户端未提供时,按需补齐(默认关闭)
if requestNeedsBetaFeatures(body) {
@@ -2776,6 +2897,20 @@ func requestNeedsBetaFeatures(body []byte) bool {
return false
}
+func requestHasTools(body []byte) bool {
+ tools := gjson.GetBytes(body, "tools")
+ if !tools.Exists() {
+ return false
+ }
+ if tools.IsArray() {
+ return len(tools.Array()) > 0
+ }
+ if tools.IsObject() {
+ return len(tools.Map()) > 0
+ }
+ return false
+}
+
func defaultAPIKeyBetaHeader(body []byte) string {
modelID := gjson.GetBytes(body, "model").String()
if strings.Contains(strings.ToLower(modelID), "haiku") {
@@ -3309,6 +3444,45 @@ func (s *GatewayService) replaceModelInSSELine(line, fromModel, toModel string)
return "data: " + string(newData)
}
+func rewriteParamKeysInValue(value any, cache map[string]string) (any, bool) {
+ switch v := value.(type) {
+ case map[string]any:
+ changed := false
+ rewritten := make(map[string]any, len(v))
+ for key, item := range v {
+ newKey := normalizeParamNameForOpenCode(key, cache)
+ newItem, childChanged := rewriteParamKeysInValue(item, cache)
+ if childChanged {
+ changed = true
+ }
+ if newKey != key {
+ changed = true
+ }
+ rewritten[newKey] = newItem
+ }
+ if !changed {
+ return value, false
+ }
+ return rewritten, true
+ case []any:
+ changed := false
+ rewritten := make([]any, len(v))
+ for idx, item := range v {
+ newItem, childChanged := rewriteParamKeysInValue(item, cache)
+ if childChanged {
+ changed = true
+ }
+ rewritten[idx] = newItem
+ }
+ if !changed {
+ return value, false
+ }
+ return rewritten, true
+ default:
+ return value, false
+ }
+}
+
func rewriteToolNamesInValue(value any, toolNameMap map[string]string) bool {
switch v := value.(type) {
case map[string]any:
@@ -3321,6 +3495,15 @@ func rewriteToolNamesInValue(value any, toolNameMap map[string]string) bool {
changed = true
}
}
+ if input, ok := v["input"].(map[string]any); ok {
+ rewrittenInput, inputChanged := rewriteParamKeysInValue(input, toolNameMap)
+ if inputChanged {
+ if m, ok := rewrittenInput.(map[string]any); ok {
+ v["input"] = m
+ changed = true
+ }
+ }
+ }
}
for _, item := range v {
if rewriteToolNamesInValue(item, toolNameMap) {
@@ -3369,6 +3552,15 @@ func replaceToolNamesInText(text string, toolNameMap map[string]string) string {
}
return strings.Replace(match, model, mapped, 1)
})
+
+ for mapped, original := range toolNameMap {
+ if mapped == "" || original == "" || mapped == original {
+ continue
+ }
+ output = strings.ReplaceAll(output, "\""+mapped+"\":", "\""+original+"\":")
+ output = strings.ReplaceAll(output, "\\\""+mapped+"\\\":", "\\\""+original+"\\\":")
+ }
+
return output
}
@@ -3381,22 +3573,11 @@ func (s *GatewayService) replaceToolNamesInSSELine(line string, toolNameMap map[
return line
}
- var event map[string]any
- if err := json.Unmarshal([]byte(data), &event); err != nil {
- replaced := replaceToolNamesInText(data, toolNameMap)
- if replaced == data {
- return line
- }
- return "data: " + replaced
- }
- if !rewriteToolNamesInValue(event, toolNameMap) {
+ replaced := replaceToolNamesInText(data, toolNameMap)
+ if replaced == data {
return line
}
- newData, err := json.Marshal(event)
- if err != nil {
- return line
- }
- return "data: " + string(newData)
+ return "data: " + replaced
}
func (s *GatewayService) parseSSEUsage(data string, usage *ClaudeUsage) {
From 0c011b889b980ba4626703af4d54e1879cfd3f9c Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Fri, 16 Jan 2026 23:15:52 +0800
Subject: [PATCH 10/99] =?UTF-8?q?fix(=E7=BD=91=E5=85=B3):=20Claude=20Code?=
=?UTF-8?q?=20OAuth=20=E8=A1=A5=E9=BD=90=20oauth=20beta?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
backend/internal/service/gateway_service.go | 34 ++++++++++++++++-----
1 file changed, 27 insertions(+), 7 deletions(-)
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index 71ad0d00..8b4871c9 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -2820,12 +2820,19 @@ func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Contex
applyClaudeOAuthHeaderDefaults(req, reqStream)
}
- // 处理anthropic-beta header(OAuth账号需要特殊处理)
- if tokenType == "oauth" && mimicClaudeCode {
- if requestHasTools(body) {
- req.Header.Set("anthropic-beta", claude.MessageBetaHeaderWithTools)
+ // 处理 anthropic-beta header(OAuth 账号需要包含 oauth beta)
+ if tokenType == "oauth" {
+ if mimicClaudeCode {
+ // 非 Claude Code 客户端:按 Claude Code 规则生成 beta header
+ if requestHasTools(body) {
+ req.Header.Set("anthropic-beta", claude.MessageBetaHeaderWithTools)
+ } else {
+ req.Header.Set("anthropic-beta", claude.MessageBetaHeaderNoTools)
+ }
} else {
- req.Header.Set("anthropic-beta", claude.MessageBetaHeaderNoTools)
+ // Claude Code 客户端:尽量透传原始 header,仅补齐 oauth beta
+ clientBetaHeader := req.Header.Get("anthropic-beta")
+ req.Header.Set("anthropic-beta", s.getBetaHeader(modelID, clientBetaHeader))
}
} else if s.cfg != nil && s.cfg.Gateway.InjectBetaForAPIKey && req.Header.Get("anthropic-beta") == "" {
// API-key:仅在请求显式使用 beta 特性且客户端未提供时,按需补齐(默认关闭)
@@ -4070,8 +4077,21 @@ func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Con
}
// OAuth 账号:处理 anthropic-beta header
- if tokenType == "oauth" && mimicClaudeCode {
- req.Header.Set("anthropic-beta", claude.CountTokensBetaHeader)
+ if tokenType == "oauth" {
+ if mimicClaudeCode {
+ req.Header.Set("anthropic-beta", claude.CountTokensBetaHeader)
+ } else {
+ clientBetaHeader := req.Header.Get("anthropic-beta")
+ if clientBetaHeader == "" {
+ req.Header.Set("anthropic-beta", claude.CountTokensBetaHeader)
+ } else {
+ beta := s.getBetaHeader(modelID, clientBetaHeader)
+ if !strings.Contains(beta, claude.BetaTokenCounting) {
+ beta = beta + "," + claude.BetaTokenCounting
+ }
+ req.Header.Set("anthropic-beta", beta)
+ }
+ }
} else if s.cfg != nil && s.cfg.Gateway.InjectBetaForAPIKey && req.Header.Get("anthropic-beta") == "" {
// API-key:与 messages 同步的按需 beta 注入(默认关闭)
if requestNeedsBetaFeatures(body) {
From 8917a3ea8fa4ffa8943e32513b3cee5528ef516d Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Sat, 17 Jan 2026 00:27:36 +0800
Subject: [PATCH 11/99] =?UTF-8?q?fix(=E7=BD=91=E5=85=B3):=20=E4=BF=AE?=
=?UTF-8?q?=E5=A4=8D=20golangci-lint?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
backend/internal/service/gateway_service.go | 10 +++-------
1 file changed, 3 insertions(+), 7 deletions(-)
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index 8b4871c9..fb2d40a3 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -439,7 +439,7 @@ func toPascalCase(value string) string {
}
runes := []rune(lower)
runes[0] = unicode.ToUpper(runes[0])
- builder.WriteString(string(runes))
+ _, _ = builder.WriteString(string(runes))
}
return builder.String()
}
@@ -723,12 +723,8 @@ func normalizeClaudeOAuthRequestBody(body []byte, modelID string, opts claudeOAu
}
}
- if _, ok := req["temperature"]; ok {
- delete(req, "temperature")
- }
- if _, ok := req["tool_choice"]; ok {
- delete(req, "tool_choice")
- }
+ delete(req, "temperature")
+ delete(req, "tool_choice")
newBody, err := json.Marshal(req)
if err != nil {
From 4e75d8fda9f010e856741328d9a49ee66a1b3a53 Mon Sep 17 00:00:00 2001
From: nick8802754751 <>
Date: Sat, 17 Jan 2026 16:06:44 +0800
Subject: [PATCH 12/99] =?UTF-8?q?fix:=20=E6=B7=BB=E5=8A=A0=E6=B7=B7?=
=?UTF-8?q?=E5=90=88=E6=B8=A0=E9=81=93=E8=AD=A6=E5=91=8A=E7=A1=AE=E8=AE=A4?=
=?UTF-8?q?=E6=A1=86=E5=92=8C=E8=BF=87=E6=BB=A4=20prompt=5Fcache=5Fretenti?=
=?UTF-8?q?on=20=E5=8F=82=E6=95=B0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
- 前端: EditAccountModal 和 CreateAccountModal 添加 409 mixed_channel_warning 处理
- 前端: 弹出确认框让用户确认混合渠道风险
- 后端: 过滤 OpenAI 请求中的 prompt_cache_retention 参数(上游不支持)
- 添加中英文翻译
Co-authored-by: factory-droid[bot] <138933559+factory-droid[bot]@users.noreply.github.com>
---
.../service/openai_gateway_service.go | 6 ++
.../components/account/CreateAccountModal.vue | 60 ++++++++++++++-----
.../components/account/EditAccountModal.vue | 32 +++++++++-
frontend/src/i18n/locales/en.ts | 1 +
frontend/src/i18n/locales/zh.ts | 1 +
5 files changed, 82 insertions(+), 18 deletions(-)
diff --git a/backend/internal/service/openai_gateway_service.go b/backend/internal/service/openai_gateway_service.go
index c7d94882..45b4c69c 100644
--- a/backend/internal/service/openai_gateway_service.go
+++ b/backend/internal/service/openai_gateway_service.go
@@ -649,6 +649,12 @@ func (s *OpenAIGatewayService) Forward(ctx context.Context, c *gin.Context, acco
bodyModified = true
}
}
+
+ // Remove prompt_cache_retention (not supported by upstream OpenAI API)
+ if _, has := reqBody["prompt_cache_retention"]; has {
+ delete(reqBody, "prompt_cache_retention")
+ bodyModified = true
+ }
}
// Re-serialize body only if modified
diff --git a/frontend/src/components/account/CreateAccountModal.vue b/frontend/src/components/account/CreateAccountModal.vue
index c81de00e..05f328ac 100644
--- a/frontend/src/components/account/CreateAccountModal.vue
+++ b/frontend/src/components/account/CreateAccountModal.vue
@@ -2157,6 +2157,46 @@ const handleClose = () => {
emit('close')
}
+// Helper function to create account with mixed channel warning handling
+const doCreateAccount = async (payload: any, confirmMixedChannelRisk = false) => {
+ if (confirmMixedChannelRisk) {
+ payload.confirm_mixed_channel_risk = true
+ }
+
+ submitting.value = true
+ try {
+ await adminAPI.accounts.create(payload)
+ appStore.showSuccess(t('admin.accounts.accountCreated'))
+ emit('created')
+ handleClose()
+ } catch (error: any) {
+ // Handle 409 mixed_channel_warning - show confirmation dialog
+ if (error.response?.status === 409 && error.response?.data?.error === 'mixed_channel_warning') {
+ const details = error.response.data.details || {}
+ const groupName = details.group_name || 'Unknown'
+ const currentPlatform = details.current_platform || 'Unknown'
+ const otherPlatform = details.other_platform || 'Unknown'
+
+ const confirmMessage = t('admin.accounts.mixedChannelWarning', {
+ groupName,
+ currentPlatform,
+ otherPlatform
+ })
+
+ if (confirm(confirmMessage)) {
+ // Retry with confirmation flag
+ submitting.value = false
+ await doCreateAccount(payload, true)
+ return
+ }
+ } else {
+ appStore.showError(error.response?.data?.detail || t('admin.accounts.failedToCreate'))
+ }
+ } finally {
+ submitting.value = false
+ }
+}
+
const handleSubmit = async () => {
// For OAuth-based type, handle OAuth flow (goes to step 2)
if (isOAuthFlow.value) {
@@ -2213,21 +2253,11 @@ const handleSubmit = async () => {
form.credentials = credentials
- submitting.value = true
- try {
- await adminAPI.accounts.create({
- ...form,
- group_ids: form.group_ids,
- auto_pause_on_expired: autoPauseOnExpired.value
- })
- appStore.showSuccess(t('admin.accounts.accountCreated'))
- emit('created')
- handleClose()
- } catch (error: any) {
- appStore.showError(error.response?.data?.detail || t('admin.accounts.failedToCreate'))
- } finally {
- submitting.value = false
- }
+ await doCreateAccount({
+ ...form,
+ group_ids: form.group_ids,
+ auto_pause_on_expired: autoPauseOnExpired.value
+ })
}
const goBackToBasicInfo = () => {
diff --git a/frontend/src/components/account/EditAccountModal.vue b/frontend/src/components/account/EditAccountModal.vue
index d27364f1..63b54df0 100644
--- a/frontend/src/components/account/EditAccountModal.vue
+++ b/frontend/src/components/account/EditAccountModal.vue
@@ -8,7 +8,7 @@
+
+
+
+
From e12dd079fd29a30d0f3a5bc96d69c8e27ef5195c Mon Sep 17 00:00:00 2001
From: shaw
Date: Wed, 28 Jan 2026 17:26:32 +0800
Subject: [PATCH 44/99] =?UTF-8?q?=E4=BF=AE=E5=A4=8D=E8=B0=83=E5=BA=A6?=
=?UTF-8?q?=E5=99=A8=E7=A9=BA=E7=BC=93=E5=AD=98=E5=AF=BC=E8=87=B4=E7=9A=84?=
=?UTF-8?q?=E7=AB=9E=E6=80=81=E6=9D=A1=E4=BB=B6bug?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
当新分组创建后立即绑定账号时,调度器会错误地将空快照视为有效缓存命中,
导致返回没有可调度的账号。现在空快照会触发数据库回退查询。
---
backend/internal/repository/scheduler_cache.go | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/backend/internal/repository/scheduler_cache.go b/backend/internal/repository/scheduler_cache.go
index 13b22107..4f447e4f 100644
--- a/backend/internal/repository/scheduler_cache.go
+++ b/backend/internal/repository/scheduler_cache.go
@@ -58,7 +58,9 @@ func (c *schedulerCache) GetSnapshot(ctx context.Context, bucket service.Schedul
return nil, false, err
}
if len(ids) == 0 {
- return []*service.Account{}, true, nil
+ // 空快照视为缓存未命中,触发数据库回退查询
+ // 这解决了新分组创建后立即绑定账号时的竞态条件问题
+ return nil, false, nil
}
keys := make([]string, 0, len(ids))
From cadca752c492ef923c66b7c7b4c50a97c16cc1b5 Mon Sep 17 00:00:00 2001
From: shaw
Date: Wed, 28 Jan 2026 18:35:20 +0800
Subject: [PATCH 45/99] =?UTF-8?q?=E4=BF=AE=E5=A4=8DSSE=E6=B5=81=E5=BC=8F?=
=?UTF-8?q?=E5=93=8D=E5=BA=94=E4=B8=ADusage=E6=95=B0=E6=8D=AE=E8=A2=AB?=
=?UTF-8?q?=E8=A6=86=E7=9B=96=E7=9A=84=E9=97=AE=E9=A2=98?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
backend/internal/service/gateway_service.go | 21 +++++++++++++++------
1 file changed, 15 insertions(+), 6 deletions(-)
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index 5819f15a..2e3ba93e 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -3372,12 +3372,21 @@ func (s *GatewayService) parseSSEUsage(data string, usage *ClaudeUsage) {
} `json:"usage"`
}
if json.Unmarshal([]byte(data), &msgDelta) == nil && msgDelta.Type == "message_delta" {
- // message_delta 是推理结束后的最终统计,应完全覆盖 message_start 的数据
- // 这对于 Claude API 和 GLM 等兼容 API 都是正确的行为
- usage.InputTokens = msgDelta.Usage.InputTokens
- usage.OutputTokens = msgDelta.Usage.OutputTokens
- usage.CacheCreationInputTokens = msgDelta.Usage.CacheCreationInputTokens
- usage.CacheReadInputTokens = msgDelta.Usage.CacheReadInputTokens
+ // message_delta 仅覆盖存在且非0的字段
+ // 避免覆盖 message_start 中已有的值(如 input_tokens)
+ // Claude API 的 message_delta 通常只包含 output_tokens
+ if msgDelta.Usage.InputTokens > 0 {
+ usage.InputTokens = msgDelta.Usage.InputTokens
+ }
+ if msgDelta.Usage.OutputTokens > 0 {
+ usage.OutputTokens = msgDelta.Usage.OutputTokens
+ }
+ if msgDelta.Usage.CacheCreationInputTokens > 0 {
+ usage.CacheCreationInputTokens = msgDelta.Usage.CacheCreationInputTokens
+ }
+ if msgDelta.Usage.CacheReadInputTokens > 0 {
+ usage.CacheReadInputTokens = msgDelta.Usage.CacheReadInputTokens
+ }
}
}
From 31f817d189c6db22940c8b836c50f48073dae61a Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Thu, 29 Jan 2026 01:28:43 +0800
Subject: [PATCH 46/99] fix: add newline separation for Claude Code system
prompt
---
backend/internal/service/account_test_service.go | 2 +-
backend/internal/service/gateway_service.go | 9 ++++++---
2 files changed, 7 insertions(+), 4 deletions(-)
diff --git a/backend/internal/service/account_test_service.go b/backend/internal/service/account_test_service.go
index 46376c69..3290fe52 100644
--- a/backend/internal/service/account_test_service.go
+++ b/backend/internal/service/account_test_service.go
@@ -123,7 +123,7 @@ func createTestPayload(modelID string) (map[string]any, error) {
"system": []map[string]any{
{
"type": "text",
- "text": "You are Claude Code, Anthropic's official CLI for Claude.",
+ "text": claudeCodeSystemPrompt,
"cache_control": map[string]string{
"type": "ephemeral",
},
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index b46e856e..b1507245 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -39,7 +39,9 @@ const (
claudeAPICountTokensURL = "https://api.anthropic.com/v1/messages/count_tokens?beta=true"
stickySessionTTL = time.Hour // 粘性会话TTL
defaultMaxLineSize = 40 * 1024 * 1024
- claudeCodeSystemPrompt = "You are Claude Code, Anthropic's official CLI for Claude."
+ // Keep a trailing blank line so that when upstream concatenates system strings,
+ // the injected Claude Code banner doesn't run into the next system instruction.
+ claudeCodeSystemPrompt = "You are Claude Code, Anthropic's official CLI for Claude.\n\n"
maxCacheControlBlocks = 4 // Anthropic API 允许的最大 cache_control 块数量
)
@@ -2479,7 +2481,8 @@ func injectClaudeCodePrompt(body []byte, system any) []byte {
case nil:
newSystem = []any{claudeCodeBlock}
case string:
- if v == "" || v == claudeCodeSystemPrompt {
+ // Be tolerant of older/newer clients that may differ only by trailing whitespace/newlines.
+ if strings.TrimSpace(v) == "" || strings.TrimSpace(v) == strings.TrimSpace(claudeCodeSystemPrompt) {
newSystem = []any{claudeCodeBlock}
} else {
newSystem = []any{claudeCodeBlock, map[string]any{"type": "text", "text": v}}
@@ -2489,7 +2492,7 @@ func injectClaudeCodePrompt(body []byte, system any) []byte {
newSystem = append(newSystem, claudeCodeBlock)
for _, item := range v {
if m, ok := item.(map[string]any); ok {
- if text, ok := m["text"].(string); ok && text == claudeCodeSystemPrompt {
+ if text, ok := m["text"].(string); ok && strings.TrimSpace(text) == strings.TrimSpace(claudeCodeSystemPrompt) {
continue
}
}
From 4d566f68b687cf09e7f523d4b8a3342ccbaa2553 Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Thu, 29 Jan 2026 01:34:58 +0800
Subject: [PATCH 47/99] chore: gofmt
---
backend/internal/service/gateway_service.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index b1507245..01663ae7 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -42,7 +42,7 @@ const (
// Keep a trailing blank line so that when upstream concatenates system strings,
// the injected Claude Code banner doesn't run into the next system instruction.
claudeCodeSystemPrompt = "You are Claude Code, Anthropic's official CLI for Claude.\n\n"
- maxCacheControlBlocks = 4 // Anthropic API 允许的最大 cache_control 块数量
+ maxCacheControlBlocks = 4 // Anthropic API 允许的最大 cache_control 块数量
)
func (s *GatewayService) debugModelRoutingEnabled() bool {
From 723e54013a2196daa19371db1884e0c016b61b6a Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Thu, 29 Jan 2026 01:49:51 +0800
Subject: [PATCH 48/99] fix(oauth): mimic Claude Code metadata and beta headers
---
backend/internal/pkg/claude/constants.go | 7 ++-
.../service/gateway_oauth_metadata_test.go | 62 +++++++++++++++++++
backend/internal/service/gateway_service.go | 17 +++--
3 files changed, 79 insertions(+), 7 deletions(-)
create mode 100644 backend/internal/service/gateway_oauth_metadata_test.go
diff --git a/backend/internal/pkg/claude/constants.go b/backend/internal/pkg/claude/constants.go
index 0c6e9b4c..fb95ffe2 100644
--- a/backend/internal/pkg/claude/constants.go
+++ b/backend/internal/pkg/claude/constants.go
@@ -16,7 +16,12 @@ const (
const DefaultBetaHeader = BetaClaudeCode + "," + BetaOAuth + "," + BetaInterleavedThinking + "," + BetaFineGrainedToolStreaming
// MessageBetaHeaderNoTools /v1/messages 在无工具时的 beta header
-const MessageBetaHeaderNoTools = BetaOAuth + "," + BetaInterleavedThinking
+//
+// NOTE: Claude Code OAuth credentials are scoped to Claude Code. When we "mimic"
+// Claude Code for non-Claude-Code clients, we must include the claude-code beta
+// even if the request doesn't use tools, otherwise upstream may reject the
+// request as a non-Claude-Code API request.
+const MessageBetaHeaderNoTools = BetaClaudeCode + "," + BetaOAuth + "," + BetaInterleavedThinking
// MessageBetaHeaderWithTools /v1/messages 在有工具时的 beta header
const MessageBetaHeaderWithTools = BetaClaudeCode + "," + BetaOAuth + "," + BetaInterleavedThinking
diff --git a/backend/internal/service/gateway_oauth_metadata_test.go b/backend/internal/service/gateway_oauth_metadata_test.go
new file mode 100644
index 00000000..ed6f1887
--- /dev/null
+++ b/backend/internal/service/gateway_oauth_metadata_test.go
@@ -0,0 +1,62 @@
+package service
+
+import (
+ "regexp"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestBuildOAuthMetadataUserID_FallbackWithoutAccountUUID(t *testing.T) {
+ svc := &GatewayService{}
+
+ parsed := &ParsedRequest{
+ Model: "claude-sonnet-4-5",
+ Stream: true,
+ MetadataUserID: "",
+ System: nil,
+ Messages: nil,
+ }
+
+ account := &Account{
+ ID: 123,
+ Type: AccountTypeOAuth,
+ Extra: map[string]any{}, // intentionally missing account_uuid / claude_user_id
+ }
+
+ fp := &Fingerprint{ClientID: "deadbeef"} // should be used as user id in legacy format
+
+ got := svc.buildOAuthMetadataUserID(parsed, account, fp)
+ require.NotEmpty(t, got)
+
+ // Legacy format: user_{client}_account__session_{uuid}
+ re := regexp.MustCompile(`^user_[a-zA-Z0-9]+_account__session_[a-f0-9-]{36}$`)
+ require.True(t, re.MatchString(got), "unexpected user_id format: %s", got)
+}
+
+func TestBuildOAuthMetadataUserID_UsesAccountUUIDWhenPresent(t *testing.T) {
+ svc := &GatewayService{}
+
+ parsed := &ParsedRequest{
+ Model: "claude-sonnet-4-5",
+ Stream: true,
+ MetadataUserID: "",
+ }
+
+ account := &Account{
+ ID: 123,
+ Type: AccountTypeOAuth,
+ Extra: map[string]any{
+ "account_uuid": "acc-uuid",
+ "claude_user_id": "clientid123",
+ "anthropic_user_id": "",
+ },
+ }
+
+ got := svc.buildOAuthMetadataUserID(parsed, account, nil)
+ require.NotEmpty(t, got)
+
+ // New format: user_{client}_account_{account_uuid}_session_{uuid}
+ re := regexp.MustCompile(`^user_clientid123_account_acc-uuid_session_[a-f0-9-]{36}$`)
+ require.True(t, re.MatchString(got), "unexpected user_id format: %s", got)
+}
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index 01663ae7..1ebd1246 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -795,17 +795,15 @@ func (s *GatewayService) buildOAuthMetadataUserID(parsed *ParsedRequest, account
if parsed.MetadataUserID != "" {
return ""
}
- accountUUID := account.GetExtraString("account_uuid")
- if accountUUID == "" {
- return ""
- }
userID := strings.TrimSpace(account.GetClaudeUserID())
if userID == "" && fp != nil {
userID = fp.ClientID
}
if userID == "" {
- return ""
+ // Fall back to a random, well-formed client id so we can still satisfy
+ // Claude Code OAuth requirements when account metadata is incomplete.
+ userID = generateClientID()
}
sessionHash := s.GenerateSessionHash(parsed)
@@ -814,7 +812,14 @@ func (s *GatewayService) buildOAuthMetadataUserID(parsed *ParsedRequest, account
seed := fmt.Sprintf("%d::%s", account.ID, sessionHash)
sessionID = generateSessionUUID(seed)
}
- return fmt.Sprintf("user_%s_account_%s_session_%s", userID, accountUUID, sessionID)
+
+ // Prefer the newer format that includes account_uuid (if present),
+ // otherwise fall back to the legacy Claude Code format.
+ accountUUID := strings.TrimSpace(account.GetExtraString("account_uuid"))
+ if accountUUID != "" {
+ return fmt.Sprintf("user_%s_account_%s_session_%s", userID, accountUUID, sessionID)
+ }
+ return fmt.Sprintf("user_%s_account__session_%s", userID, sessionID)
}
func generateSessionUUID(seed string) string {
From be3b788b8fd0b9a6715c6c9cfeddfaed4fa9ff65 Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Thu, 29 Jan 2026 02:03:54 +0800
Subject: [PATCH 49/99] fix: also prefix next system block with Claude Code
banner
---
.../internal/service/gateway_prompt_test.go | 9 +++++---
backend/internal/service/gateway_service.go | 22 ++++++++++++++++++-
2 files changed, 27 insertions(+), 4 deletions(-)
diff --git a/backend/internal/service/gateway_prompt_test.go b/backend/internal/service/gateway_prompt_test.go
index b056f8fa..52c75d1d 100644
--- a/backend/internal/service/gateway_prompt_test.go
+++ b/backend/internal/service/gateway_prompt_test.go
@@ -2,6 +2,7 @@ package service
import (
"encoding/json"
+ "strings"
"testing"
"github.com/stretchr/testify/require"
@@ -134,6 +135,8 @@ func TestSystemIncludesClaudeCodePrompt(t *testing.T) {
}
func TestInjectClaudeCodePrompt(t *testing.T) {
+ claudePrefix := strings.TrimSpace(claudeCodeSystemPrompt)
+
tests := []struct {
name string
body string
@@ -162,7 +165,7 @@ func TestInjectClaudeCodePrompt(t *testing.T) {
system: "Custom prompt",
wantSystemLen: 2,
wantFirstText: claudeCodeSystemPrompt,
- wantSecondText: "Custom prompt",
+ wantSecondText: claudePrefix + "\n\nCustom prompt",
},
{
name: "string system equals Claude Code prompt",
@@ -178,7 +181,7 @@ func TestInjectClaudeCodePrompt(t *testing.T) {
// Claude Code + Custom = 2
wantSystemLen: 2,
wantFirstText: claudeCodeSystemPrompt,
- wantSecondText: "Custom",
+ wantSecondText: claudePrefix + "\n\nCustom",
},
{
name: "array system with existing Claude Code prompt (should dedupe)",
@@ -190,7 +193,7 @@ func TestInjectClaudeCodePrompt(t *testing.T) {
// Claude Code at start + Other = 2 (deduped)
wantSystemLen: 2,
wantFirstText: claudeCodeSystemPrompt,
- wantSecondText: "Other",
+ wantSecondText: claudePrefix + "\n\nOther",
},
{
name: "empty array",
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index 1ebd1246..c23b4f36 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -2479,6 +2479,10 @@ func injectClaudeCodePrompt(body []byte, system any) []byte {
"text": claudeCodeSystemPrompt,
"cache_control": map[string]string{"type": "ephemeral"},
}
+ // Opencode plugin applies an extra safeguard: it not only prepends the Claude Code
+ // banner, it also prefixes the next system instruction with the same banner plus
+ // a blank line. This helps when upstream concatenates system instructions.
+ claudeCodePrefix := strings.TrimSpace(claudeCodeSystemPrompt)
var newSystem []any
@@ -2490,16 +2494,32 @@ func injectClaudeCodePrompt(body []byte, system any) []byte {
if strings.TrimSpace(v) == "" || strings.TrimSpace(v) == strings.TrimSpace(claudeCodeSystemPrompt) {
newSystem = []any{claudeCodeBlock}
} else {
- newSystem = []any{claudeCodeBlock, map[string]any{"type": "text", "text": v}}
+ // Mirror opencode behavior: keep the banner as a separate system entry,
+ // but also prefix the next system text with the banner.
+ merged := v
+ if !strings.HasPrefix(v, claudeCodePrefix) {
+ merged = claudeCodePrefix + "\n\n" + v
+ }
+ newSystem = []any{claudeCodeBlock, map[string]any{"type": "text", "text": merged}}
}
case []any:
newSystem = make([]any, 0, len(v)+1)
newSystem = append(newSystem, claudeCodeBlock)
+ prefixedNext := false
for _, item := range v {
if m, ok := item.(map[string]any); ok {
if text, ok := m["text"].(string); ok && strings.TrimSpace(text) == strings.TrimSpace(claudeCodeSystemPrompt) {
continue
}
+ // Prefix the first subsequent text system block once.
+ if !prefixedNext {
+ if blockType, _ := m["type"].(string); blockType == "text" {
+ if text, ok := m["text"].(string); ok && strings.TrimSpace(text) != "" && !strings.HasPrefix(text, claudeCodePrefix) {
+ m["text"] = claudeCodePrefix + "\n\n" + text
+ prefixedNext = true
+ }
+ }
+ }
}
newSystem = append(newSystem, item)
}
From 4d40fb6b602a0469bbdaa56bd047493a9d712f32 Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Thu, 29 Jan 2026 02:36:28 +0800
Subject: [PATCH 50/99] fix(oauth): merge anthropic-beta and force Claude Code
headers in mimic mode
---
backend/internal/service/gateway_beta_test.go | 23 +++++++
backend/internal/service/gateway_service.go | 66 +++++++++++++++++--
2 files changed, 84 insertions(+), 5 deletions(-)
create mode 100644 backend/internal/service/gateway_beta_test.go
diff --git a/backend/internal/service/gateway_beta_test.go b/backend/internal/service/gateway_beta_test.go
new file mode 100644
index 00000000..dd58c183
--- /dev/null
+++ b/backend/internal/service/gateway_beta_test.go
@@ -0,0 +1,23 @@
+package service
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestMergeAnthropicBeta(t *testing.T) {
+ got := mergeAnthropicBeta(
+ []string{"oauth-2025-04-20", "interleaved-thinking-2025-05-14"},
+ "foo, oauth-2025-04-20,bar, foo",
+ )
+ require.Equal(t, "oauth-2025-04-20,interleaved-thinking-2025-05-14,foo,bar", got)
+}
+
+func TestMergeAnthropicBeta_EmptyIncoming(t *testing.T) {
+ got := mergeAnthropicBeta(
+ []string{"oauth-2025-04-20", "interleaved-thinking-2025-05-14"},
+ "",
+ )
+ require.Equal(t, "oauth-2025-04-20,interleaved-thinking-2025-05-14", got)
+}
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index c23b4f36..c666c96a 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -3230,12 +3230,18 @@ func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Contex
// 处理 anthropic-beta header(OAuth 账号需要包含 oauth beta)
if tokenType == "oauth" {
if mimicClaudeCode {
- // 非 Claude Code 客户端:按 Claude Code 规则生成 beta header
+ // 非 Claude Code 客户端:按 opencode 的策略处理:
+ // - 强制 Claude Code 指纹相关请求头(尤其是 user-agent/x-stainless/x-app)
+ // - 保留 incoming beta 的同时,确保 OAuth 所需 beta 存在
+ applyClaudeCodeMimicHeaders(req, reqStream)
+
+ incomingBeta := req.Header.Get("anthropic-beta")
+ requiredBetas := []string{claude.BetaOAuth, claude.BetaInterleavedThinking}
+ // Tools 场景更严格,保留 claude-code beta 以提高 Claude Code 识别成功率。
if requestHasTools(body) {
- req.Header.Set("anthropic-beta", claude.MessageBetaHeaderWithTools)
- } else {
- req.Header.Set("anthropic-beta", claude.MessageBetaHeaderNoTools)
+ requiredBetas = append([]string{claude.BetaClaudeCode}, requiredBetas...)
}
+ req.Header.Set("anthropic-beta", mergeAnthropicBeta(requiredBetas, incomingBeta))
} else {
// Claude Code 客户端:尽量透传原始 header,仅补齐 oauth beta
clientBetaHeader := req.Header.Get("anthropic-beta")
@@ -3353,6 +3359,52 @@ func applyClaudeOAuthHeaderDefaults(req *http.Request, isStream bool) {
}
}
+func mergeAnthropicBeta(required []string, incoming string) string {
+ seen := make(map[string]struct{}, len(required)+8)
+ out := make([]string, 0, len(required)+8)
+
+ add := func(v string) {
+ v = strings.TrimSpace(v)
+ if v == "" {
+ return
+ }
+ if _, ok := seen[v]; ok {
+ return
+ }
+ seen[v] = struct{}{}
+ out = append(out, v)
+ }
+
+ for _, r := range required {
+ add(r)
+ }
+ for _, p := range strings.Split(incoming, ",") {
+ add(p)
+ }
+ return strings.Join(out, ",")
+}
+
+// applyClaudeCodeMimicHeaders forces "Claude Code-like" request headers.
+// This mirrors opencode-anthropic-auth behavior: do not trust downstream
+// headers when using Claude Code-scoped OAuth credentials.
+func applyClaudeCodeMimicHeaders(req *http.Request, isStream bool) {
+ if req == nil {
+ return
+ }
+ // Start with the standard defaults (fill missing).
+ applyClaudeOAuthHeaderDefaults(req, isStream)
+ // Then force key headers to match Claude Code fingerprint regardless of what the client sent.
+ for key, value := range claude.DefaultHeaders {
+ if value == "" {
+ continue
+ }
+ req.Header.Set(key, value)
+ }
+ if isStream {
+ req.Header.Set("x-stainless-helper-method", "stream")
+ }
+}
+
func truncateForLog(b []byte, maxBytes int) string {
if maxBytes <= 0 {
maxBytes = 2048
@@ -4600,7 +4652,11 @@ func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Con
// OAuth 账号:处理 anthropic-beta header
if tokenType == "oauth" {
if mimicClaudeCode {
- req.Header.Set("anthropic-beta", claude.CountTokensBetaHeader)
+ applyClaudeCodeMimicHeaders(req, false)
+
+ incomingBeta := req.Header.Get("anthropic-beta")
+ requiredBetas := []string{claude.BetaClaudeCode, claude.BetaOAuth, claude.BetaInterleavedThinking, claude.BetaTokenCounting}
+ req.Header.Set("anthropic-beta", mergeAnthropicBeta(requiredBetas, incomingBeta))
} else {
clientBetaHeader := req.Header.Get("anthropic-beta")
if clientBetaHeader == "" {
From c37fe91672796d2d1f44f2d1d21a91edc3232a10 Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Thu, 29 Jan 2026 02:52:26 +0800
Subject: [PATCH 51/99] fix(oauth): update Claude CLI fingerprint headers
---
backend/internal/pkg/claude/constants.go | 8 +++++---
backend/internal/service/identity_service.go | 6 +++---
2 files changed, 8 insertions(+), 6 deletions(-)
diff --git a/backend/internal/pkg/claude/constants.go b/backend/internal/pkg/claude/constants.go
index fb95ffe2..8b3441dc 100644
--- a/backend/internal/pkg/claude/constants.go
+++ b/backend/internal/pkg/claude/constants.go
@@ -40,13 +40,15 @@ const APIKeyHaikuBetaHeader = BetaInterleavedThinking
// DefaultHeaders 是 Claude Code 客户端默认请求头。
var DefaultHeaders = map[string]string{
- "User-Agent": "claude-cli/2.1.2 (external, cli)",
+ // Keep these in sync with recent Claude CLI traffic to reduce the chance
+ // that Claude Code-scoped OAuth credentials are rejected as "non-CLI" usage.
+ "User-Agent": "claude-cli/2.1.22 (external, cli)",
"X-Stainless-Lang": "js",
"X-Stainless-Package-Version": "0.70.0",
"X-Stainless-OS": "Linux",
- "X-Stainless-Arch": "x64",
+ "X-Stainless-Arch": "arm64",
"X-Stainless-Runtime": "node",
- "X-Stainless-Runtime-Version": "v24.3.0",
+ "X-Stainless-Runtime-Version": "v24.13.0",
"X-Stainless-Retry-Count": "0",
"X-Stainless-Timeout": "600",
"X-App": "cli",
diff --git a/backend/internal/service/identity_service.go b/backend/internal/service/identity_service.go
index 4e227fea..a620ac4d 100644
--- a/backend/internal/service/identity_service.go
+++ b/backend/internal/service/identity_service.go
@@ -26,13 +26,13 @@ var (
// 默认指纹值(当客户端未提供时使用)
var defaultFingerprint = Fingerprint{
- UserAgent: "claude-cli/2.1.2 (external, cli)",
+ UserAgent: "claude-cli/2.1.22 (external, cli)",
StainlessLang: "js",
StainlessPackageVersion: "0.70.0",
StainlessOS: "Linux",
- StainlessArch: "x64",
+ StainlessArch: "arm64",
StainlessRuntime: "node",
- StainlessRuntimeVersion: "v24.3.0",
+ StainlessRuntimeVersion: "v24.13.0",
}
// Fingerprint represents account fingerprint data
From d98648f03ba9a0b4308d9b7aeed1e45416ddaf71 Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Thu, 29 Jan 2026 03:03:40 +0800
Subject: [PATCH 52/99] fix: rewrite OpenCode identity sentence to Claude Code
---
.../internal/service/gateway_sanitize_test.go | 20 +++++++++++++++++++
backend/internal/service/gateway_service.go | 8 ++++++++
2 files changed, 28 insertions(+)
create mode 100644 backend/internal/service/gateway_sanitize_test.go
diff --git a/backend/internal/service/gateway_sanitize_test.go b/backend/internal/service/gateway_sanitize_test.go
new file mode 100644
index 00000000..3b0a07c9
--- /dev/null
+++ b/backend/internal/service/gateway_sanitize_test.go
@@ -0,0 +1,20 @@
+package service
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestSanitizeOpenCodeText_RewritesCanonicalSentence(t *testing.T) {
+ in := "You are OpenCode, the best coding agent on the planet."
+ got := sanitizeOpenCodeText(in)
+ require.Equal(t, strings.TrimSpace(claudeCodeSystemPrompt), got)
+}
+
+func TestSanitizeOpenCodeText_RewritesOpenCodeKeywords(t *testing.T) {
+ in := "OpenCode and opencode are mentioned."
+ got := sanitizeOpenCodeText(in)
+ require.Equal(t, "Claude Code and Claude are mentioned.", got)
+}
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index c666c96a..e17d0f0c 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -559,6 +559,14 @@ func sanitizeOpenCodeText(text string) string {
if text == "" {
return text
}
+ // Some clients include a fixed OpenCode identity sentence. Anthropic may treat
+ // this as a non-Claude-Code fingerprint, so rewrite it to the canonical
+ // Claude Code banner before generic "OpenCode"/"opencode" replacements.
+ text = strings.ReplaceAll(
+ text,
+ "You are OpenCode, the best coding agent on the planet.",
+ strings.TrimSpace(claudeCodeSystemPrompt),
+ )
text = strings.ReplaceAll(text, "OpenCode", "Claude Code")
text = opencodeTextRe.ReplaceAllString(text, "Claude")
return text
From 63412a9fcc4f9569d8338cd5ce3befbfc13604a3 Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Thu, 29 Jan 2026 03:13:14 +0800
Subject: [PATCH 53/99] chore(debug): log Claude mimic fingerprint
---
backend/internal/service/gateway_service.go | 128 ++++++++++++++++++++
1 file changed, 128 insertions(+)
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index e17d0f0c..44abdb0a 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -50,6 +50,11 @@ func (s *GatewayService) debugModelRoutingEnabled() bool {
return v == "1" || v == "true" || v == "yes" || v == "on"
}
+func (s *GatewayService) debugClaudeMimicEnabled() bool {
+ v := strings.ToLower(strings.TrimSpace(os.Getenv("SUB2API_DEBUG_CLAUDE_MIMIC")))
+ return v == "1" || v == "true" || v == "yes" || v == "on"
+}
+
func shortSessionHash(sessionHash string) string {
if sessionHash == "" {
return ""
@@ -60,6 +65,121 @@ func shortSessionHash(sessionHash string) string {
return sessionHash[:8]
}
+func redactAuthHeaderValue(v string) string {
+ v = strings.TrimSpace(v)
+ if v == "" {
+ return ""
+ }
+ // Keep scheme for debugging, redact secret.
+ if strings.HasPrefix(strings.ToLower(v), "bearer ") {
+ return "Bearer [redacted]"
+ }
+ return "[redacted]"
+}
+
+func safeHeaderValueForLog(key string, v string) string {
+ key = strings.ToLower(strings.TrimSpace(key))
+ switch key {
+ case "authorization", "x-api-key":
+ return redactAuthHeaderValue(v)
+ default:
+ return strings.TrimSpace(v)
+ }
+}
+
+func extractSystemPreviewFromBody(body []byte) string {
+ if len(body) == 0 {
+ return ""
+ }
+ sys := gjson.GetBytes(body, "system")
+ if !sys.Exists() {
+ return ""
+ }
+
+ switch {
+ case sys.IsArray():
+ for _, item := range sys.Array() {
+ if !item.IsObject() {
+ continue
+ }
+ if strings.EqualFold(item.Get("type").String(), "text") {
+ if t := item.Get("text").String(); strings.TrimSpace(t) != "" {
+ return t
+ }
+ }
+ }
+ return ""
+ case sys.Type == gjson.String:
+ return sys.String()
+ default:
+ return ""
+ }
+}
+
+func logClaudeMimicDebug(req *http.Request, body []byte, account *Account, tokenType string, mimicClaudeCode bool) {
+ if req == nil {
+ return
+ }
+
+ // Only log a minimal fingerprint to avoid leaking user content.
+ interesting := []string{
+ "user-agent",
+ "x-app",
+ "anthropic-dangerous-direct-browser-access",
+ "anthropic-version",
+ "anthropic-beta",
+ "x-stainless-lang",
+ "x-stainless-package-version",
+ "x-stainless-os",
+ "x-stainless-arch",
+ "x-stainless-runtime",
+ "x-stainless-runtime-version",
+ "x-stainless-retry-count",
+ "x-stainless-timeout",
+ "authorization",
+ "x-api-key",
+ "content-type",
+ "accept",
+ "x-stainless-helper-method",
+ }
+
+ h := make([]string, 0, len(interesting))
+ for _, k := range interesting {
+ if v := req.Header.Get(k); v != "" {
+ h = append(h, fmt.Sprintf("%s=%q", k, safeHeaderValueForLog(k, v)))
+ }
+ }
+
+ metaUserID := strings.TrimSpace(gjson.GetBytes(body, "metadata.user_id").String())
+ sysPreview := strings.TrimSpace(extractSystemPreviewFromBody(body))
+
+ // Truncate preview to keep logs sane.
+ if len(sysPreview) > 300 {
+ sysPreview = sysPreview[:300] + "..."
+ }
+ sysPreview = strings.ReplaceAll(sysPreview, "\n", "\\n")
+ sysPreview = strings.ReplaceAll(sysPreview, "\r", "\\r")
+
+ aid := int64(0)
+ aname := ""
+ if account != nil {
+ aid = account.ID
+ aname = account.Name
+ }
+
+ log.Printf(
+ "[ClaudeMimicDebug] url=%s account=%d(%s) tokenType=%s mimic=%t meta.user_id=%q system.preview=%q headers={%s}",
+ req.URL.String(),
+ aid,
+ aname,
+ tokenType,
+ mimicClaudeCode,
+ metaUserID,
+ sysPreview,
+ strings.Join(h, " "),
+ )
+}
+
// sseDataRe matches SSE data lines with optional whitespace after colon.
// Some upstream APIs return non-standard "data:" without space (should be "data: ").
var (
@@ -3264,6 +3384,10 @@ func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Contex
}
}
+ if s.debugClaudeMimicEnabled() {
+ logClaudeMimicDebug(req, body, account, tokenType, mimicClaudeCode)
+ }
+
return req, nil
}
@@ -4686,6 +4810,10 @@ func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Con
}
}
+ if s.debugClaudeMimicEnabled() {
+ logClaudeMimicDebug(req, body, account, tokenType, mimicClaudeCode)
+ }
+
return req, nil
}
From 91079d3f15a66ecd9460daa122f6b8dc65c3957b Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Thu, 29 Jan 2026 15:17:46 +0800
Subject: [PATCH 54/99] chore(debug): emit Claude mimic fingerprint on
credential-scope error
---
backend/internal/service/gateway_service.go | 64 +++++++++++++++++++--
1 file changed, 60 insertions(+), 4 deletions(-)
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index 44abdb0a..b3bbfd94 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -45,6 +45,10 @@ const (
maxCacheControlBlocks = 4 // Anthropic API 允许的最大 cache_control 块数量
)
+const (
+ claudeMimicDebugInfoKey = "claude_mimic_debug_info"
+)
+
func (s *GatewayService) debugModelRoutingEnabled() bool {
v := strings.ToLower(strings.TrimSpace(os.Getenv("SUB2API_DEBUG_MODEL_ROUTING")))
return v == "1" || v == "true" || v == "yes" || v == "on"
@@ -116,9 +120,9 @@ func extractSystemPreviewFromBody(body []byte) string {
}
}
-func logClaudeMimicDebug(req *http.Request, body []byte, account *Account, tokenType string, mimicClaudeCode bool) {
+func buildClaudeMimicDebugLine(req *http.Request, body []byte, account *Account, tokenType string, mimicClaudeCode bool) string {
if req == nil {
- return
+ return ""
}
// Only log a minimal fingerprint to avoid leaking user content.
@@ -167,8 +171,8 @@ func logClaudeMimicDebug(req *http.Request, body []byte, account *Account, token
aname = account.Name
}
- log.Printf(
- "[ClaudeMimicDebug] url=%s account=%d(%s) tokenType=%s mimic=%t meta.user_id=%q system.preview=%q headers={%s}",
+ return fmt.Sprintf(
+ "url=%s account=%d(%s) tokenType=%s mimic=%t meta.user_id=%q system.preview=%q headers={%s}",
req.URL.String(),
aid,
aname,
@@ -180,6 +184,23 @@ func logClaudeMimicDebug(req *http.Request, body []byte, account *Account, token
)
}
+func logClaudeMimicDebug(req *http.Request, body []byte, account *Account, tokenType string, mimicClaudeCode bool) {
+ line := buildClaudeMimicDebugLine(req, body, account, tokenType, mimicClaudeCode)
+ if line == "" {
+ return
+ }
+ log.Printf("[ClaudeMimicDebug] %s", line)
+}
+
+func isClaudeCodeCredentialScopeError(msg string) bool {
+ m := strings.ToLower(strings.TrimSpace(msg))
+ if m == "" {
+ return false
+ }
+ return strings.Contains(m, "only authorized for use with claude code") &&
+ strings.Contains(m, "cannot be used for other api requests")
+}
+
// sseDataRe matches SSE data lines with optional whitespace after colon.
// Some upstream APIs return non-standard "data:" without space (should be "data: ").
var (
@@ -3384,6 +3405,11 @@ func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Contex
}
}
+ // Always capture a compact fingerprint line for later error diagnostics.
+ // We only print it when needed (or when the explicit debug flag is enabled).
+ if c != nil && tokenType == "oauth" {
+ c.Set(claudeMimicDebugInfoKey, buildClaudeMimicDebugLine(req, body, account, tokenType, mimicClaudeCode))
+ }
if s.debugClaudeMimicEnabled() {
logClaudeMimicDebug(req, body, account, tokenType, mimicClaudeCode)
}
@@ -3640,6 +3666,20 @@ func (s *GatewayService) handleErrorResponse(ctx context.Context, resp *http.Res
upstreamMsg := strings.TrimSpace(extractUpstreamErrorMessage(body))
upstreamMsg = sanitizeUpstreamErrorMessage(upstreamMsg)
+ // Print a compact upstream request fingerprint when we hit the Claude Code OAuth
+ // credential scope error. This avoids requiring env-var tweaks in a fixed deploy.
+ if isClaudeCodeCredentialScopeError(upstreamMsg) && c != nil {
+ if v, ok := c.Get(claudeMimicDebugInfoKey); ok {
+ if line, ok := v.(string); ok && strings.TrimSpace(line) != "" {
+ log.Printf("[ClaudeMimicDebugOnError] status=%d request_id=%s %s",
+ resp.StatusCode,
+ resp.Header.Get("x-request-id"),
+ line,
+ )
+ }
+ }
+ }
+
// Enrich Ops error logs with upstream status + message, and optionally a truncated body snippet.
upstreamDetail := ""
if s.cfg != nil && s.cfg.Gateway.LogUpstreamErrorBody {
@@ -3769,6 +3809,19 @@ func (s *GatewayService) handleRetryExhaustedError(ctx context.Context, resp *ht
upstreamMsg := strings.TrimSpace(extractUpstreamErrorMessage(respBody))
upstreamMsg = sanitizeUpstreamErrorMessage(upstreamMsg)
+
+ if isClaudeCodeCredentialScopeError(upstreamMsg) && c != nil {
+ if v, ok := c.Get(claudeMimicDebugInfoKey); ok {
+ if line, ok := v.(string); ok && strings.TrimSpace(line) != "" {
+ log.Printf("[ClaudeMimicDebugOnError] status=%d request_id=%s %s",
+ resp.StatusCode,
+ resp.Header.Get("x-request-id"),
+ line,
+ )
+ }
+ }
+ }
+
upstreamDetail := ""
if s.cfg != nil && s.cfg.Gateway.LogUpstreamErrorBody {
maxBytes := s.cfg.Gateway.LogUpstreamErrorBodyMaxBytes
@@ -4810,6 +4863,9 @@ func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Con
}
}
+ if c != nil && tokenType == "oauth" {
+ c.Set(claudeMimicDebugInfoKey, buildClaudeMimicDebugLine(req, body, account, tokenType, mimicClaudeCode))
+ }
if s.debugClaudeMimicEnabled() {
logClaudeMimicDebug(req, body, account, tokenType, mimicClaudeCode)
}
From 8375094c69a1e70d8dfbe02303357081e2606166 Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Thu, 29 Jan 2026 15:31:29 +0800
Subject: [PATCH 55/99] fix(oauth): match Claude CLI accept header and beta set
---
backend/internal/service/gateway_service.go | 31 +++++++++++++++++----
1 file changed, 26 insertions(+), 5 deletions(-)
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index b3bbfd94..8363ba66 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -3385,12 +3385,12 @@ func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Contex
applyClaudeCodeMimicHeaders(req, reqStream)
incomingBeta := req.Header.Get("anthropic-beta")
+ // Match real Claude CLI traffic (per mitmproxy reports):
+ // messages requests typically use only oauth + interleaved-thinking.
+ // Also drop claude-code beta if a downstream client added it.
requiredBetas := []string{claude.BetaOAuth, claude.BetaInterleavedThinking}
- // Tools 场景更严格,保留 claude-code beta 以提高 Claude Code 识别成功率。
- if requestHasTools(body) {
- requiredBetas = append([]string{claude.BetaClaudeCode}, requiredBetas...)
- }
- req.Header.Set("anthropic-beta", mergeAnthropicBeta(requiredBetas, incomingBeta))
+ drop := map[string]struct{}{claude.BetaClaudeCode: {}}
+ req.Header.Set("anthropic-beta", mergeAnthropicBetaDropping(requiredBetas, incomingBeta, drop))
} else {
// Claude Code 客户端:尽量透传原始 header,仅补齐 oauth beta
clientBetaHeader := req.Header.Get("anthropic-beta")
@@ -3542,6 +3542,25 @@ func mergeAnthropicBeta(required []string, incoming string) string {
return strings.Join(out, ",")
}
+func mergeAnthropicBetaDropping(required []string, incoming string, drop map[string]struct{}) string {
+ merged := mergeAnthropicBeta(required, incoming)
+ if merged == "" || len(drop) == 0 {
+ return merged
+ }
+ out := make([]string, 0, 8)
+ for _, p := range strings.Split(merged, ",") {
+ p = strings.TrimSpace(p)
+ if p == "" {
+ continue
+ }
+ if _, ok := drop[p]; ok {
+ continue
+ }
+ out = append(out, p)
+ }
+ return strings.Join(out, ",")
+}
+
// applyClaudeCodeMimicHeaders forces "Claude Code-like" request headers.
// This mirrors opencode-anthropic-auth behavior: do not trust downstream
// headers when using Claude Code-scoped OAuth credentials.
@@ -3558,6 +3577,8 @@ func applyClaudeCodeMimicHeaders(req *http.Request, isStream bool) {
}
req.Header.Set(key, value)
}
+ // Real Claude CLI uses Accept: application/json (even for streaming).
+ req.Header.Set("accept", "application/json")
if isStream {
req.Header.Set("x-stainless-helper-method", "stream")
}
From fa454b1b99f2c6866f756773c96f91cad8353173 Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Thu, 29 Jan 2026 15:37:07 +0800
Subject: [PATCH 56/99] fix: align Claude Code system banner with opencode
latest
---
backend/internal/service/gateway_service.go | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index 8363ba66..47ea8593 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -39,9 +39,10 @@ const (
claudeAPICountTokensURL = "https://api.anthropic.com/v1/messages/count_tokens?beta=true"
stickySessionTTL = time.Hour // 粘性会话TTL
defaultMaxLineSize = 40 * 1024 * 1024
- // Keep a trailing blank line so that when upstream concatenates system strings,
- // the injected Claude Code banner doesn't run into the next system instruction.
- claudeCodeSystemPrompt = "You are Claude Code, Anthropic's official CLI for Claude.\n\n"
+ // Canonical Claude Code banner. Keep it EXACT (no trailing whitespace/newlines)
+ // to match real Claude CLI traffic as closely as possible. When we need a visual
+ // separator between system blocks, we add "\n\n" at concatenation time.
+ claudeCodeSystemPrompt = "You are Claude Code, Anthropic's official CLI for Claude."
maxCacheControlBlocks = 4 // Anthropic API 允许的最大 cache_control 块数量
)
From ba16ace697c6b2b65ca6c4e84818f04dd28aeabd Mon Sep 17 00:00:00 2001
From: liuxiongfeng
Date: Fri, 30 Jan 2026 08:14:52 +0800
Subject: [PATCH 57/99] chore: upgrade Antigravity User-Agent to 1.15.8
---
backend/internal/pkg/antigravity/oauth.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/backend/internal/pkg/antigravity/oauth.go b/backend/internal/pkg/antigravity/oauth.go
index ee2a6c1a..c7d657b9 100644
--- a/backend/internal/pkg/antigravity/oauth.go
+++ b/backend/internal/pkg/antigravity/oauth.go
@@ -33,7 +33,7 @@ const (
"https://www.googleapis.com/auth/experimentsandconfigs"
// User-Agent(与 Antigravity-Manager 保持一致)
- UserAgent = "antigravity/1.11.9 windows/amd64"
+ UserAgent = "antigravity/1.15.8 windows/amd64"
// Session 过期时间
SessionTTL = 30 * time.Minute
From 6599b366dc17abe62fdd79683b7ee71a06888667 Mon Sep 17 00:00:00 2001
From: shaw
Date: Fri, 30 Jan 2026 08:53:53 +0800
Subject: [PATCH 58/99] =?UTF-8?q?fix:=20=E5=8D=87=E7=BA=A7Go=E7=89=88?=
=?UTF-8?q?=E6=9C=AC=E8=87=B31.25.6=E4=BF=AE=E5=A4=8D=E6=A0=87=E5=87=86?=
=?UTF-8?q?=E5=BA=93=E5=AE=89=E5=85=A8=E6=BC=8F=E6=B4=9E?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
修复GO-2026-4341和GO-2026-4340两个标准库漏洞
---
.github/workflows/security-scan.yml | 2 +-
Dockerfile | 2 +-
backend/go.mod | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/.github/workflows/security-scan.yml b/.github/workflows/security-scan.yml
index 160a0df9..dfb8e37e 100644
--- a/.github/workflows/security-scan.yml
+++ b/.github/workflows/security-scan.yml
@@ -22,7 +22,7 @@ jobs:
cache-dependency-path: backend/go.sum
- name: Verify Go version
run: |
- go version | grep -q 'go1.25.5'
+ go version | grep -q 'go1.25.6'
- name: Run govulncheck
working-directory: backend
run: |
diff --git a/Dockerfile b/Dockerfile
index b3320300..3d4b5094 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -7,7 +7,7 @@
# =============================================================================
ARG NODE_IMAGE=node:24-alpine
-ARG GOLANG_IMAGE=golang:1.25.5-alpine
+ARG GOLANG_IMAGE=golang:1.25.6-alpine
ARG ALPINE_IMAGE=alpine:3.20
ARG GOPROXY=https://goproxy.cn,direct
ARG GOSUMDB=sum.golang.google.cn
diff --git a/backend/go.mod b/backend/go.mod
index ad7d76b6..4c3e6246 100644
--- a/backend/go.mod
+++ b/backend/go.mod
@@ -1,6 +1,6 @@
module github.com/Wei-Shaw/sub2api
-go 1.25.5
+go 1.25.6
require (
entgo.io/ent v0.14.5
From 4d8f2db92494a29b6b74d220493b02760c48befb Mon Sep 17 00:00:00 2001
From: shaw
Date: Fri, 30 Jan 2026 08:57:37 +0800
Subject: [PATCH 59/99] =?UTF-8?q?fix:=20=E6=9B=B4=E6=96=B0=E6=89=80?=
=?UTF-8?q?=E6=9C=89CI=20workflow=E7=9A=84Go=E7=89=88=E6=9C=AC=E9=AA=8C?=
=?UTF-8?q?=E8=AF=81=E8=87=B31.25.6?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.github/workflows/backend-ci.yml | 4 ++--
.github/workflows/release.yml | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/.github/workflows/backend-ci.yml b/.github/workflows/backend-ci.yml
index 3ea8860a..e5624f86 100644
--- a/.github/workflows/backend-ci.yml
+++ b/.github/workflows/backend-ci.yml
@@ -19,7 +19,7 @@ jobs:
cache: true
- name: Verify Go version
run: |
- go version | grep -q 'go1.25.5'
+ go version | grep -q 'go1.25.6'
- name: Unit tests
working-directory: backend
run: make test-unit
@@ -38,7 +38,7 @@ jobs:
cache: true
- name: Verify Go version
run: |
- go version | grep -q 'go1.25.5'
+ go version | grep -q 'go1.25.6'
- name: golangci-lint
uses: golangci/golangci-lint-action@v9
with:
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 0415000d..f45c1a0b 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -115,7 +115,7 @@ jobs:
- name: Verify Go version
run: |
- go version | grep -q 'go1.25.5'
+ go version | grep -q 'go1.25.6'
# Docker setup for GoReleaser
- name: Set up QEMU
From b7f69844e1f8eada74167848dfa8d2456792d639 Mon Sep 17 00:00:00 2001
From: ducky
Date: Fri, 30 Jan 2026 16:45:04 +0800
Subject: [PATCH 60/99] feat(announcements): add admin/user announcement system
Implements announcements end-to-end (admin CRUD + read status, user list + mark read) with OR-of-AND targeting. Also breaks the ent<->service import cycle by moving schema-facing constants/targeting into a new domain package.
---
backend/cmd/server/wire_gen.go | 9 +-
backend/ent/announcement.go | 249 +++
backend/ent/announcement/announcement.go | 164 ++
backend/ent/announcement/where.go | 624 ++++++
backend/ent/announcement_create.go | 1159 +++++++++++
backend/ent/announcement_delete.go | 88 +
backend/ent/announcement_query.go | 643 ++++++
backend/ent/announcement_update.go | 824 ++++++++
backend/ent/announcementread.go | 185 ++
.../ent/announcementread/announcementread.go | 127 ++
backend/ent/announcementread/where.go | 257 +++
backend/ent/announcementread_create.go | 660 +++++++
backend/ent/announcementread_delete.go | 88 +
backend/ent/announcementread_query.go | 718 +++++++
backend/ent/announcementread_update.go | 456 +++++
backend/ent/client.go | 376 +++-
backend/ent/ent.go | 4 +
backend/ent/hook/hook.go | 24 +
backend/ent/intercept/intercept.go | 60 +
backend/ent/migrate/schema.go | 102 +
backend/ent/mutation.go | 1759 ++++++++++++++++-
backend/ent/predicate/predicate.go | 6 +
backend/ent/runtime/runtime.go | 52 +
backend/ent/schema/account.go | 4 +-
backend/ent/schema/announcement.go | 91 +
backend/ent/schema/announcement_read.go | 66 +
backend/ent/schema/api_key.go | 4 +-
backend/ent/schema/group.go | 8 +-
backend/ent/schema/promo_code.go | 4 +-
backend/ent/schema/redeem_code.go | 6 +-
backend/ent/schema/user.go | 7 +-
backend/ent/schema/user_subscription.go | 4 +-
backend/ent/tx.go | 6 +
backend/ent/user.go | 28 +-
backend/ent/user/user.go | 30 +
backend/ent/user/where.go | 23 +
backend/ent/user_create.go | 32 +
backend/ent/user_query.go | 76 +-
backend/ent/user_update.go | 163 ++
backend/internal/domain/announcement.go | 226 +++
backend/internal/domain/constants.go | 64 +
.../handler/admin/announcement_handler.go | 247 +++
.../internal/handler/announcement_handler.go | 82 +
backend/internal/handler/dto/announcement.go | 75 +
backend/internal/handler/handler.go | 2 +
backend/internal/handler/wire.go | 6 +
.../repository/announcement_read_repo.go | 84 +
.../internal/repository/announcement_repo.go | 195 ++
backend/internal/repository/wire.go | 2 +
backend/internal/server/routes/admin.go | 15 +
backend/internal/server/routes/user.go | 7 +
backend/internal/service/announcement.go | 64 +
.../internal/service/announcement_service.go | 378 ++++
.../service/announcement_targeting_test.go | 67 +
backend/internal/service/domain_constants.go | 56 +-
backend/internal/service/wire.go | 1 +
backend/migrations/045_add_announcements.sql | 44 +
frontend/src/api/admin/announcements.ts | 71 +
frontend/src/api/admin/index.ts | 3 +
frontend/src/api/announcements.ts | 26 +
frontend/src/api/index.ts | 1 +
.../AnnouncementReadStatusDialog.vue | 186 ++
.../AnnouncementTargetingEditor.vue | 388 ++++
frontend/src/components/layout/AppSidebar.vue | 18 +
frontend/src/i18n/locales/en.ts | 83 +
frontend/src/i18n/locales/zh.ts | 83 +
frontend/src/router/index.ts | 24 +
frontend/src/types/index.ts | 75 +
.../src/views/admin/AnnouncementsView.vue | 538 +++++
frontend/src/views/user/AnnouncementsView.vue | 140 ++
70 files changed, 12366 insertions(+), 71 deletions(-)
create mode 100644 backend/ent/announcement.go
create mode 100644 backend/ent/announcement/announcement.go
create mode 100644 backend/ent/announcement/where.go
create mode 100644 backend/ent/announcement_create.go
create mode 100644 backend/ent/announcement_delete.go
create mode 100644 backend/ent/announcement_query.go
create mode 100644 backend/ent/announcement_update.go
create mode 100644 backend/ent/announcementread.go
create mode 100644 backend/ent/announcementread/announcementread.go
create mode 100644 backend/ent/announcementread/where.go
create mode 100644 backend/ent/announcementread_create.go
create mode 100644 backend/ent/announcementread_delete.go
create mode 100644 backend/ent/announcementread_query.go
create mode 100644 backend/ent/announcementread_update.go
create mode 100644 backend/ent/schema/announcement.go
create mode 100644 backend/ent/schema/announcement_read.go
create mode 100644 backend/internal/domain/announcement.go
create mode 100644 backend/internal/domain/constants.go
create mode 100644 backend/internal/handler/admin/announcement_handler.go
create mode 100644 backend/internal/handler/announcement_handler.go
create mode 100644 backend/internal/handler/dto/announcement.go
create mode 100644 backend/internal/repository/announcement_read_repo.go
create mode 100644 backend/internal/repository/announcement_repo.go
create mode 100644 backend/internal/service/announcement.go
create mode 100644 backend/internal/service/announcement_service.go
create mode 100644 backend/internal/service/announcement_targeting_test.go
create mode 100644 backend/migrations/045_add_announcements.sql
create mode 100644 frontend/src/api/admin/announcements.ts
create mode 100644 frontend/src/api/announcements.ts
create mode 100644 frontend/src/components/admin/announcements/AnnouncementReadStatusDialog.vue
create mode 100644 frontend/src/components/admin/announcements/AnnouncementTargetingEditor.vue
create mode 100644 frontend/src/views/admin/AnnouncementsView.vue
create mode 100644 frontend/src/views/user/AnnouncementsView.vue
diff --git a/backend/cmd/server/wire_gen.go b/backend/cmd/server/wire_gen.go
index 71624091..7d465fee 100644
--- a/backend/cmd/server/wire_gen.go
+++ b/backend/cmd/server/wire_gen.go
@@ -81,6 +81,10 @@ func initializeApplication(buildInfo handler.BuildInfo) (*Application, error) {
redeemService := service.NewRedeemService(redeemCodeRepository, userRepository, subscriptionService, redeemCache, billingCacheService, client, apiKeyAuthCacheInvalidator)
redeemHandler := handler.NewRedeemHandler(redeemService)
subscriptionHandler := handler.NewSubscriptionHandler(subscriptionService)
+ announcementRepository := repository.NewAnnouncementRepository(client)
+ announcementReadRepository := repository.NewAnnouncementReadRepository(client)
+ announcementService := service.NewAnnouncementService(announcementRepository, announcementReadRepository, userRepository, userSubscriptionRepository)
+ announcementHandler := handler.NewAnnouncementHandler(announcementService)
dashboardAggregationRepository := repository.NewDashboardAggregationRepository(db)
dashboardStatsCache := repository.NewDashboardCache(redisClient, configConfig)
dashboardService := service.NewDashboardService(usageLogRepository, dashboardAggregationRepository, dashboardStatsCache, configConfig)
@@ -128,6 +132,7 @@ func initializeApplication(buildInfo handler.BuildInfo) (*Application, error) {
crsSyncService := service.NewCRSSyncService(accountRepository, proxyRepository, oAuthService, openAIOAuthService, geminiOAuthService, configConfig)
sessionLimitCache := repository.ProvideSessionLimitCache(redisClient, configConfig)
accountHandler := admin.NewAccountHandler(adminService, oAuthService, openAIOAuthService, geminiOAuthService, antigravityOAuthService, rateLimitService, accountUsageService, accountTestService, concurrencyService, crsSyncService, sessionLimitCache, compositeTokenCacheInvalidator)
+ adminAnnouncementHandler := admin.NewAnnouncementHandler(announcementService)
oAuthHandler := admin.NewOAuthHandler(oAuthService)
openAIOAuthHandler := admin.NewOpenAIOAuthHandler(openAIOAuthService, adminService)
geminiOAuthHandler := admin.NewGeminiOAuthHandler(geminiOAuthService)
@@ -167,12 +172,12 @@ func initializeApplication(buildInfo handler.BuildInfo) (*Application, error) {
userAttributeValueRepository := repository.NewUserAttributeValueRepository(client)
userAttributeService := service.NewUserAttributeService(userAttributeDefinitionRepository, userAttributeValueRepository)
userAttributeHandler := admin.NewUserAttributeHandler(userAttributeService)
- adminHandlers := handler.ProvideAdminHandlers(dashboardHandler, adminUserHandler, groupHandler, accountHandler, oAuthHandler, openAIOAuthHandler, geminiOAuthHandler, antigravityOAuthHandler, proxyHandler, adminRedeemHandler, promoHandler, settingHandler, opsHandler, systemHandler, adminSubscriptionHandler, adminUsageHandler, userAttributeHandler)
+ adminHandlers := handler.ProvideAdminHandlers(dashboardHandler, adminUserHandler, groupHandler, accountHandler, adminAnnouncementHandler, oAuthHandler, openAIOAuthHandler, geminiOAuthHandler, antigravityOAuthHandler, proxyHandler, adminRedeemHandler, promoHandler, settingHandler, opsHandler, systemHandler, adminSubscriptionHandler, adminUsageHandler, userAttributeHandler)
gatewayHandler := handler.NewGatewayHandler(gatewayService, geminiMessagesCompatService, antigravityGatewayService, userService, concurrencyService, billingCacheService, configConfig)
openAIGatewayHandler := handler.NewOpenAIGatewayHandler(openAIGatewayService, concurrencyService, billingCacheService, configConfig)
handlerSettingHandler := handler.ProvideSettingHandler(settingService, buildInfo)
totpHandler := handler.NewTotpHandler(totpService)
- handlers := handler.ProvideHandlers(authHandler, userHandler, apiKeyHandler, usageHandler, redeemHandler, subscriptionHandler, adminHandlers, gatewayHandler, openAIGatewayHandler, handlerSettingHandler, totpHandler)
+ handlers := handler.ProvideHandlers(authHandler, userHandler, apiKeyHandler, usageHandler, redeemHandler, subscriptionHandler, announcementHandler, adminHandlers, gatewayHandler, openAIGatewayHandler, handlerSettingHandler, totpHandler)
jwtAuthMiddleware := middleware.NewJWTAuthMiddleware(authService, userService)
adminAuthMiddleware := middleware.NewAdminAuthMiddleware(authService, userService, settingService)
apiKeyAuthMiddleware := middleware.NewAPIKeyAuthMiddleware(apiKeyService, subscriptionService, configConfig)
diff --git a/backend/ent/announcement.go b/backend/ent/announcement.go
new file mode 100644
index 00000000..93d7a375
--- /dev/null
+++ b/backend/ent/announcement.go
@@ -0,0 +1,249 @@
+// Code generated by ent, DO NOT EDIT.
+
+package ent
+
+import (
+ "encoding/json"
+ "fmt"
+ "strings"
+ "time"
+
+ "entgo.io/ent"
+ "entgo.io/ent/dialect/sql"
+ "github.com/Wei-Shaw/sub2api/ent/announcement"
+ "github.com/Wei-Shaw/sub2api/internal/domain"
+)
+
+// Announcement is the model entity for the Announcement schema.
+type Announcement struct {
+ config `json:"-"`
+ // ID of the ent.
+ ID int64 `json:"id,omitempty"`
+ // 公告标题
+ Title string `json:"title,omitempty"`
+ // 公告内容(支持 Markdown)
+ Content string `json:"content,omitempty"`
+ // 状态: draft, active, archived
+ Status string `json:"status,omitempty"`
+ // 展示条件(JSON 规则)
+ Targeting domain.AnnouncementTargeting `json:"targeting,omitempty"`
+ // 开始展示时间(为空表示立即生效)
+ StartsAt *time.Time `json:"starts_at,omitempty"`
+ // 结束展示时间(为空表示永久生效)
+ EndsAt *time.Time `json:"ends_at,omitempty"`
+ // 创建人用户ID(管理员)
+ CreatedBy *int64 `json:"created_by,omitempty"`
+ // 更新人用户ID(管理员)
+ UpdatedBy *int64 `json:"updated_by,omitempty"`
+ // CreatedAt holds the value of the "created_at" field.
+ CreatedAt time.Time `json:"created_at,omitempty"`
+ // UpdatedAt holds the value of the "updated_at" field.
+ UpdatedAt time.Time `json:"updated_at,omitempty"`
+ // Edges holds the relations/edges for other nodes in the graph.
+ // The values are being populated by the AnnouncementQuery when eager-loading is set.
+ Edges AnnouncementEdges `json:"edges"`
+ selectValues sql.SelectValues
+}
+
+// AnnouncementEdges holds the relations/edges for other nodes in the graph.
+type AnnouncementEdges struct {
+ // Reads holds the value of the reads edge.
+ Reads []*AnnouncementRead `json:"reads,omitempty"`
+ // loadedTypes holds the information for reporting if a
+ // type was loaded (or requested) in eager-loading or not.
+ loadedTypes [1]bool
+}
+
+// ReadsOrErr returns the Reads value or an error if the edge
+// was not loaded in eager-loading.
+func (e AnnouncementEdges) ReadsOrErr() ([]*AnnouncementRead, error) {
+ if e.loadedTypes[0] {
+ return e.Reads, nil
+ }
+ return nil, &NotLoadedError{edge: "reads"}
+}
+
+// scanValues returns the types for scanning values from sql.Rows.
+func (*Announcement) scanValues(columns []string) ([]any, error) {
+ values := make([]any, len(columns))
+ for i := range columns {
+ switch columns[i] {
+ case announcement.FieldTargeting:
+ values[i] = new([]byte)
+ case announcement.FieldID, announcement.FieldCreatedBy, announcement.FieldUpdatedBy:
+ values[i] = new(sql.NullInt64)
+ case announcement.FieldTitle, announcement.FieldContent, announcement.FieldStatus:
+ values[i] = new(sql.NullString)
+ case announcement.FieldStartsAt, announcement.FieldEndsAt, announcement.FieldCreatedAt, announcement.FieldUpdatedAt:
+ values[i] = new(sql.NullTime)
+ default:
+ values[i] = new(sql.UnknownType)
+ }
+ }
+ return values, nil
+}
+
+// assignValues assigns the values that were returned from sql.Rows (after scanning)
+// to the Announcement fields.
+func (_m *Announcement) assignValues(columns []string, values []any) error {
+ if m, n := len(values), len(columns); m < n {
+ return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
+ }
+ for i := range columns {
+ switch columns[i] {
+ case announcement.FieldID:
+ value, ok := values[i].(*sql.NullInt64)
+ if !ok {
+ return fmt.Errorf("unexpected type %T for field id", value)
+ }
+ _m.ID = int64(value.Int64)
+ case announcement.FieldTitle:
+ if value, ok := values[i].(*sql.NullString); !ok {
+ return fmt.Errorf("unexpected type %T for field title", values[i])
+ } else if value.Valid {
+ _m.Title = value.String
+ }
+ case announcement.FieldContent:
+ if value, ok := values[i].(*sql.NullString); !ok {
+ return fmt.Errorf("unexpected type %T for field content", values[i])
+ } else if value.Valid {
+ _m.Content = value.String
+ }
+ case announcement.FieldStatus:
+ if value, ok := values[i].(*sql.NullString); !ok {
+ return fmt.Errorf("unexpected type %T for field status", values[i])
+ } else if value.Valid {
+ _m.Status = value.String
+ }
+ case announcement.FieldTargeting:
+ if value, ok := values[i].(*[]byte); !ok {
+ return fmt.Errorf("unexpected type %T for field targeting", values[i])
+ } else if value != nil && len(*value) > 0 {
+ if err := json.Unmarshal(*value, &_m.Targeting); err != nil {
+ return fmt.Errorf("unmarshal field targeting: %w", err)
+ }
+ }
+ case announcement.FieldStartsAt:
+ if value, ok := values[i].(*sql.NullTime); !ok {
+ return fmt.Errorf("unexpected type %T for field starts_at", values[i])
+ } else if value.Valid {
+ _m.StartsAt = new(time.Time)
+ *_m.StartsAt = value.Time
+ }
+ case announcement.FieldEndsAt:
+ if value, ok := values[i].(*sql.NullTime); !ok {
+ return fmt.Errorf("unexpected type %T for field ends_at", values[i])
+ } else if value.Valid {
+ _m.EndsAt = new(time.Time)
+ *_m.EndsAt = value.Time
+ }
+ case announcement.FieldCreatedBy:
+ if value, ok := values[i].(*sql.NullInt64); !ok {
+ return fmt.Errorf("unexpected type %T for field created_by", values[i])
+ } else if value.Valid {
+ _m.CreatedBy = new(int64)
+ *_m.CreatedBy = value.Int64
+ }
+ case announcement.FieldUpdatedBy:
+ if value, ok := values[i].(*sql.NullInt64); !ok {
+ return fmt.Errorf("unexpected type %T for field updated_by", values[i])
+ } else if value.Valid {
+ _m.UpdatedBy = new(int64)
+ *_m.UpdatedBy = value.Int64
+ }
+ case announcement.FieldCreatedAt:
+ if value, ok := values[i].(*sql.NullTime); !ok {
+ return fmt.Errorf("unexpected type %T for field created_at", values[i])
+ } else if value.Valid {
+ _m.CreatedAt = value.Time
+ }
+ case announcement.FieldUpdatedAt:
+ if value, ok := values[i].(*sql.NullTime); !ok {
+ return fmt.Errorf("unexpected type %T for field updated_at", values[i])
+ } else if value.Valid {
+ _m.UpdatedAt = value.Time
+ }
+ default:
+ _m.selectValues.Set(columns[i], values[i])
+ }
+ }
+ return nil
+}
+
+// Value returns the ent.Value that was dynamically selected and assigned to the Announcement.
+// This includes values selected through modifiers, order, etc.
+func (_m *Announcement) Value(name string) (ent.Value, error) {
+ return _m.selectValues.Get(name)
+}
+
+// QueryReads queries the "reads" edge of the Announcement entity.
+func (_m *Announcement) QueryReads() *AnnouncementReadQuery {
+ return NewAnnouncementClient(_m.config).QueryReads(_m)
+}
+
+// Update returns a builder for updating this Announcement.
+// Note that you need to call Announcement.Unwrap() before calling this method if this Announcement
+// was returned from a transaction, and the transaction was committed or rolled back.
+func (_m *Announcement) Update() *AnnouncementUpdateOne {
+ return NewAnnouncementClient(_m.config).UpdateOne(_m)
+}
+
+// Unwrap unwraps the Announcement entity that was returned from a transaction after it was closed,
+// so that all future queries will be executed through the driver which created the transaction.
+func (_m *Announcement) Unwrap() *Announcement {
+ _tx, ok := _m.config.driver.(*txDriver)
+ if !ok {
+ panic("ent: Announcement is not a transactional entity")
+ }
+ _m.config.driver = _tx.drv
+ return _m
+}
+
+// String implements the fmt.Stringer.
+func (_m *Announcement) String() string {
+ var builder strings.Builder
+ builder.WriteString("Announcement(")
+ builder.WriteString(fmt.Sprintf("id=%v, ", _m.ID))
+ builder.WriteString("title=")
+ builder.WriteString(_m.Title)
+ builder.WriteString(", ")
+ builder.WriteString("content=")
+ builder.WriteString(_m.Content)
+ builder.WriteString(", ")
+ builder.WriteString("status=")
+ builder.WriteString(_m.Status)
+ builder.WriteString(", ")
+ builder.WriteString("targeting=")
+ builder.WriteString(fmt.Sprintf("%v", _m.Targeting))
+ builder.WriteString(", ")
+ if v := _m.StartsAt; v != nil {
+ builder.WriteString("starts_at=")
+ builder.WriteString(v.Format(time.ANSIC))
+ }
+ builder.WriteString(", ")
+ if v := _m.EndsAt; v != nil {
+ builder.WriteString("ends_at=")
+ builder.WriteString(v.Format(time.ANSIC))
+ }
+ builder.WriteString(", ")
+ if v := _m.CreatedBy; v != nil {
+ builder.WriteString("created_by=")
+ builder.WriteString(fmt.Sprintf("%v", *v))
+ }
+ builder.WriteString(", ")
+ if v := _m.UpdatedBy; v != nil {
+ builder.WriteString("updated_by=")
+ builder.WriteString(fmt.Sprintf("%v", *v))
+ }
+ builder.WriteString(", ")
+ builder.WriteString("created_at=")
+ builder.WriteString(_m.CreatedAt.Format(time.ANSIC))
+ builder.WriteString(", ")
+ builder.WriteString("updated_at=")
+ builder.WriteString(_m.UpdatedAt.Format(time.ANSIC))
+ builder.WriteByte(')')
+ return builder.String()
+}
+
+// Announcements is a parsable slice of Announcement.
+type Announcements []*Announcement
diff --git a/backend/ent/announcement/announcement.go b/backend/ent/announcement/announcement.go
new file mode 100644
index 00000000..4f34ee05
--- /dev/null
+++ b/backend/ent/announcement/announcement.go
@@ -0,0 +1,164 @@
+// Code generated by ent, DO NOT EDIT.
+
+package announcement
+
+import (
+ "time"
+
+ "entgo.io/ent/dialect/sql"
+ "entgo.io/ent/dialect/sql/sqlgraph"
+)
+
+const (
+ // Label holds the string label denoting the announcement type in the database.
+ Label = "announcement"
+ // FieldID holds the string denoting the id field in the database.
+ FieldID = "id"
+ // FieldTitle holds the string denoting the title field in the database.
+ FieldTitle = "title"
+ // FieldContent holds the string denoting the content field in the database.
+ FieldContent = "content"
+ // FieldStatus holds the string denoting the status field in the database.
+ FieldStatus = "status"
+ // FieldTargeting holds the string denoting the targeting field in the database.
+ FieldTargeting = "targeting"
+ // FieldStartsAt holds the string denoting the starts_at field in the database.
+ FieldStartsAt = "starts_at"
+ // FieldEndsAt holds the string denoting the ends_at field in the database.
+ FieldEndsAt = "ends_at"
+ // FieldCreatedBy holds the string denoting the created_by field in the database.
+ FieldCreatedBy = "created_by"
+ // FieldUpdatedBy holds the string denoting the updated_by field in the database.
+ FieldUpdatedBy = "updated_by"
+ // FieldCreatedAt holds the string denoting the created_at field in the database.
+ FieldCreatedAt = "created_at"
+ // FieldUpdatedAt holds the string denoting the updated_at field in the database.
+ FieldUpdatedAt = "updated_at"
+ // EdgeReads holds the string denoting the reads edge name in mutations.
+ EdgeReads = "reads"
+ // Table holds the table name of the announcement in the database.
+ Table = "announcements"
+ // ReadsTable is the table that holds the reads relation/edge.
+ ReadsTable = "announcement_reads"
+ // ReadsInverseTable is the table name for the AnnouncementRead entity.
+ // It exists in this package in order to avoid circular dependency with the "announcementread" package.
+ ReadsInverseTable = "announcement_reads"
+ // ReadsColumn is the table column denoting the reads relation/edge.
+ ReadsColumn = "announcement_id"
+)
+
+// Columns holds all SQL columns for announcement fields.
+var Columns = []string{
+ FieldID,
+ FieldTitle,
+ FieldContent,
+ FieldStatus,
+ FieldTargeting,
+ FieldStartsAt,
+ FieldEndsAt,
+ FieldCreatedBy,
+ FieldUpdatedBy,
+ FieldCreatedAt,
+ FieldUpdatedAt,
+}
+
+// ValidColumn reports if the column name is valid (part of the table columns).
+func ValidColumn(column string) bool {
+ for i := range Columns {
+ if column == Columns[i] {
+ return true
+ }
+ }
+ return false
+}
+
+var (
+ // TitleValidator is a validator for the "title" field. It is called by the builders before save.
+ TitleValidator func(string) error
+ // ContentValidator is a validator for the "content" field. It is called by the builders before save.
+ ContentValidator func(string) error
+ // DefaultStatus holds the default value on creation for the "status" field.
+ DefaultStatus string
+ // StatusValidator is a validator for the "status" field. It is called by the builders before save.
+ StatusValidator func(string) error
+ // DefaultCreatedAt holds the default value on creation for the "created_at" field.
+ DefaultCreatedAt func() time.Time
+ // DefaultUpdatedAt holds the default value on creation for the "updated_at" field.
+ DefaultUpdatedAt func() time.Time
+ // UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
+ UpdateDefaultUpdatedAt func() time.Time
+)
+
+// OrderOption defines the ordering options for the Announcement queries.
+type OrderOption func(*sql.Selector)
+
+// ByID orders the results by the id field.
+func ByID(opts ...sql.OrderTermOption) OrderOption {
+ return sql.OrderByField(FieldID, opts...).ToFunc()
+}
+
+// ByTitle orders the results by the title field.
+func ByTitle(opts ...sql.OrderTermOption) OrderOption {
+ return sql.OrderByField(FieldTitle, opts...).ToFunc()
+}
+
+// ByContent orders the results by the content field.
+func ByContent(opts ...sql.OrderTermOption) OrderOption {
+ return sql.OrderByField(FieldContent, opts...).ToFunc()
+}
+
+// ByStatus orders the results by the status field.
+func ByStatus(opts ...sql.OrderTermOption) OrderOption {
+ return sql.OrderByField(FieldStatus, opts...).ToFunc()
+}
+
+// ByStartsAt orders the results by the starts_at field.
+func ByStartsAt(opts ...sql.OrderTermOption) OrderOption {
+ return sql.OrderByField(FieldStartsAt, opts...).ToFunc()
+}
+
+// ByEndsAt orders the results by the ends_at field.
+func ByEndsAt(opts ...sql.OrderTermOption) OrderOption {
+ return sql.OrderByField(FieldEndsAt, opts...).ToFunc()
+}
+
+// ByCreatedBy orders the results by the created_by field.
+func ByCreatedBy(opts ...sql.OrderTermOption) OrderOption {
+ return sql.OrderByField(FieldCreatedBy, opts...).ToFunc()
+}
+
+// ByUpdatedBy orders the results by the updated_by field.
+func ByUpdatedBy(opts ...sql.OrderTermOption) OrderOption {
+ return sql.OrderByField(FieldUpdatedBy, opts...).ToFunc()
+}
+
+// ByCreatedAt orders the results by the created_at field.
+func ByCreatedAt(opts ...sql.OrderTermOption) OrderOption {
+ return sql.OrderByField(FieldCreatedAt, opts...).ToFunc()
+}
+
+// ByUpdatedAt orders the results by the updated_at field.
+func ByUpdatedAt(opts ...sql.OrderTermOption) OrderOption {
+ return sql.OrderByField(FieldUpdatedAt, opts...).ToFunc()
+}
+
+// ByReadsCount orders the results by reads count.
+func ByReadsCount(opts ...sql.OrderTermOption) OrderOption {
+ return func(s *sql.Selector) {
+ sqlgraph.OrderByNeighborsCount(s, newReadsStep(), opts...)
+ }
+}
+
+// ByReads orders the results by reads terms.
+func ByReads(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption {
+ return func(s *sql.Selector) {
+ sqlgraph.OrderByNeighborTerms(s, newReadsStep(), append([]sql.OrderTerm{term}, terms...)...)
+ }
+}
+func newReadsStep() *sqlgraph.Step {
+ return sqlgraph.NewStep(
+ sqlgraph.From(Table, FieldID),
+ sqlgraph.To(ReadsInverseTable, FieldID),
+ sqlgraph.Edge(sqlgraph.O2M, false, ReadsTable, ReadsColumn),
+ )
+}
diff --git a/backend/ent/announcement/where.go b/backend/ent/announcement/where.go
new file mode 100644
index 00000000..d3cad2a5
--- /dev/null
+++ b/backend/ent/announcement/where.go
@@ -0,0 +1,624 @@
+// Code generated by ent, DO NOT EDIT.
+
+package announcement
+
+import (
+ "time"
+
+ "entgo.io/ent/dialect/sql"
+ "entgo.io/ent/dialect/sql/sqlgraph"
+ "github.com/Wei-Shaw/sub2api/ent/predicate"
+)
+
+// ID filters vertices based on their ID field.
+func ID(id int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldID, id))
+}
+
+// IDEQ applies the EQ predicate on the ID field.
+func IDEQ(id int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldID, id))
+}
+
+// IDNEQ applies the NEQ predicate on the ID field.
+func IDNEQ(id int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNEQ(FieldID, id))
+}
+
+// IDIn applies the In predicate on the ID field.
+func IDIn(ids ...int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldIn(FieldID, ids...))
+}
+
+// IDNotIn applies the NotIn predicate on the ID field.
+func IDNotIn(ids ...int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNotIn(FieldID, ids...))
+}
+
+// IDGT applies the GT predicate on the ID field.
+func IDGT(id int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGT(FieldID, id))
+}
+
+// IDGTE applies the GTE predicate on the ID field.
+func IDGTE(id int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGTE(FieldID, id))
+}
+
+// IDLT applies the LT predicate on the ID field.
+func IDLT(id int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLT(FieldID, id))
+}
+
+// IDLTE applies the LTE predicate on the ID field.
+func IDLTE(id int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLTE(FieldID, id))
+}
+
+// Title applies equality check predicate on the "title" field. It's identical to TitleEQ.
+func Title(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldTitle, v))
+}
+
+// Content applies equality check predicate on the "content" field. It's identical to ContentEQ.
+func Content(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldContent, v))
+}
+
+// Status applies equality check predicate on the "status" field. It's identical to StatusEQ.
+func Status(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldStatus, v))
+}
+
+// StartsAt applies equality check predicate on the "starts_at" field. It's identical to StartsAtEQ.
+func StartsAt(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldStartsAt, v))
+}
+
+// EndsAt applies equality check predicate on the "ends_at" field. It's identical to EndsAtEQ.
+func EndsAt(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldEndsAt, v))
+}
+
+// CreatedBy applies equality check predicate on the "created_by" field. It's identical to CreatedByEQ.
+func CreatedBy(v int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldCreatedBy, v))
+}
+
+// UpdatedBy applies equality check predicate on the "updated_by" field. It's identical to UpdatedByEQ.
+func UpdatedBy(v int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldUpdatedBy, v))
+}
+
+// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ.
+func CreatedAt(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldCreatedAt, v))
+}
+
+// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ.
+func UpdatedAt(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldUpdatedAt, v))
+}
+
+// TitleEQ applies the EQ predicate on the "title" field.
+func TitleEQ(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldTitle, v))
+}
+
+// TitleNEQ applies the NEQ predicate on the "title" field.
+func TitleNEQ(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNEQ(FieldTitle, v))
+}
+
+// TitleIn applies the In predicate on the "title" field.
+func TitleIn(vs ...string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldIn(FieldTitle, vs...))
+}
+
+// TitleNotIn applies the NotIn predicate on the "title" field.
+func TitleNotIn(vs ...string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNotIn(FieldTitle, vs...))
+}
+
+// TitleGT applies the GT predicate on the "title" field.
+func TitleGT(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGT(FieldTitle, v))
+}
+
+// TitleGTE applies the GTE predicate on the "title" field.
+func TitleGTE(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGTE(FieldTitle, v))
+}
+
+// TitleLT applies the LT predicate on the "title" field.
+func TitleLT(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLT(FieldTitle, v))
+}
+
+// TitleLTE applies the LTE predicate on the "title" field.
+func TitleLTE(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLTE(FieldTitle, v))
+}
+
+// TitleContains applies the Contains predicate on the "title" field.
+func TitleContains(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldContains(FieldTitle, v))
+}
+
+// TitleHasPrefix applies the HasPrefix predicate on the "title" field.
+func TitleHasPrefix(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldHasPrefix(FieldTitle, v))
+}
+
+// TitleHasSuffix applies the HasSuffix predicate on the "title" field.
+func TitleHasSuffix(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldHasSuffix(FieldTitle, v))
+}
+
+// TitleEqualFold applies the EqualFold predicate on the "title" field.
+func TitleEqualFold(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEqualFold(FieldTitle, v))
+}
+
+// TitleContainsFold applies the ContainsFold predicate on the "title" field.
+func TitleContainsFold(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldContainsFold(FieldTitle, v))
+}
+
+// ContentEQ applies the EQ predicate on the "content" field.
+func ContentEQ(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldContent, v))
+}
+
+// ContentNEQ applies the NEQ predicate on the "content" field.
+func ContentNEQ(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNEQ(FieldContent, v))
+}
+
+// ContentIn applies the In predicate on the "content" field.
+func ContentIn(vs ...string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldIn(FieldContent, vs...))
+}
+
+// ContentNotIn applies the NotIn predicate on the "content" field.
+func ContentNotIn(vs ...string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNotIn(FieldContent, vs...))
+}
+
+// ContentGT applies the GT predicate on the "content" field.
+func ContentGT(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGT(FieldContent, v))
+}
+
+// ContentGTE applies the GTE predicate on the "content" field.
+func ContentGTE(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGTE(FieldContent, v))
+}
+
+// ContentLT applies the LT predicate on the "content" field.
+func ContentLT(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLT(FieldContent, v))
+}
+
+// ContentLTE applies the LTE predicate on the "content" field.
+func ContentLTE(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLTE(FieldContent, v))
+}
+
+// ContentContains applies the Contains predicate on the "content" field.
+func ContentContains(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldContains(FieldContent, v))
+}
+
+// ContentHasPrefix applies the HasPrefix predicate on the "content" field.
+func ContentHasPrefix(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldHasPrefix(FieldContent, v))
+}
+
+// ContentHasSuffix applies the HasSuffix predicate on the "content" field.
+func ContentHasSuffix(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldHasSuffix(FieldContent, v))
+}
+
+// ContentEqualFold applies the EqualFold predicate on the "content" field.
+func ContentEqualFold(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEqualFold(FieldContent, v))
+}
+
+// ContentContainsFold applies the ContainsFold predicate on the "content" field.
+func ContentContainsFold(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldContainsFold(FieldContent, v))
+}
+
+// StatusEQ applies the EQ predicate on the "status" field.
+func StatusEQ(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldStatus, v))
+}
+
+// StatusNEQ applies the NEQ predicate on the "status" field.
+func StatusNEQ(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNEQ(FieldStatus, v))
+}
+
+// StatusIn applies the In predicate on the "status" field.
+func StatusIn(vs ...string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldIn(FieldStatus, vs...))
+}
+
+// StatusNotIn applies the NotIn predicate on the "status" field.
+func StatusNotIn(vs ...string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNotIn(FieldStatus, vs...))
+}
+
+// StatusGT applies the GT predicate on the "status" field.
+func StatusGT(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGT(FieldStatus, v))
+}
+
+// StatusGTE applies the GTE predicate on the "status" field.
+func StatusGTE(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGTE(FieldStatus, v))
+}
+
+// StatusLT applies the LT predicate on the "status" field.
+func StatusLT(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLT(FieldStatus, v))
+}
+
+// StatusLTE applies the LTE predicate on the "status" field.
+func StatusLTE(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLTE(FieldStatus, v))
+}
+
+// StatusContains applies the Contains predicate on the "status" field.
+func StatusContains(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldContains(FieldStatus, v))
+}
+
+// StatusHasPrefix applies the HasPrefix predicate on the "status" field.
+func StatusHasPrefix(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldHasPrefix(FieldStatus, v))
+}
+
+// StatusHasSuffix applies the HasSuffix predicate on the "status" field.
+func StatusHasSuffix(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldHasSuffix(FieldStatus, v))
+}
+
+// StatusEqualFold applies the EqualFold predicate on the "status" field.
+func StatusEqualFold(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEqualFold(FieldStatus, v))
+}
+
+// StatusContainsFold applies the ContainsFold predicate on the "status" field.
+func StatusContainsFold(v string) predicate.Announcement {
+ return predicate.Announcement(sql.FieldContainsFold(FieldStatus, v))
+}
+
+// TargetingIsNil applies the IsNil predicate on the "targeting" field.
+func TargetingIsNil() predicate.Announcement {
+ return predicate.Announcement(sql.FieldIsNull(FieldTargeting))
+}
+
+// TargetingNotNil applies the NotNil predicate on the "targeting" field.
+func TargetingNotNil() predicate.Announcement {
+ return predicate.Announcement(sql.FieldNotNull(FieldTargeting))
+}
+
+// StartsAtEQ applies the EQ predicate on the "starts_at" field.
+func StartsAtEQ(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldStartsAt, v))
+}
+
+// StartsAtNEQ applies the NEQ predicate on the "starts_at" field.
+func StartsAtNEQ(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNEQ(FieldStartsAt, v))
+}
+
+// StartsAtIn applies the In predicate on the "starts_at" field.
+func StartsAtIn(vs ...time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldIn(FieldStartsAt, vs...))
+}
+
+// StartsAtNotIn applies the NotIn predicate on the "starts_at" field.
+func StartsAtNotIn(vs ...time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNotIn(FieldStartsAt, vs...))
+}
+
+// StartsAtGT applies the GT predicate on the "starts_at" field.
+func StartsAtGT(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGT(FieldStartsAt, v))
+}
+
+// StartsAtGTE applies the GTE predicate on the "starts_at" field.
+func StartsAtGTE(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGTE(FieldStartsAt, v))
+}
+
+// StartsAtLT applies the LT predicate on the "starts_at" field.
+func StartsAtLT(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLT(FieldStartsAt, v))
+}
+
+// StartsAtLTE applies the LTE predicate on the "starts_at" field.
+func StartsAtLTE(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLTE(FieldStartsAt, v))
+}
+
+// StartsAtIsNil applies the IsNil predicate on the "starts_at" field.
+func StartsAtIsNil() predicate.Announcement {
+ return predicate.Announcement(sql.FieldIsNull(FieldStartsAt))
+}
+
+// StartsAtNotNil applies the NotNil predicate on the "starts_at" field.
+func StartsAtNotNil() predicate.Announcement {
+ return predicate.Announcement(sql.FieldNotNull(FieldStartsAt))
+}
+
+// EndsAtEQ applies the EQ predicate on the "ends_at" field.
+func EndsAtEQ(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldEndsAt, v))
+}
+
+// EndsAtNEQ applies the NEQ predicate on the "ends_at" field.
+func EndsAtNEQ(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNEQ(FieldEndsAt, v))
+}
+
+// EndsAtIn applies the In predicate on the "ends_at" field.
+func EndsAtIn(vs ...time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldIn(FieldEndsAt, vs...))
+}
+
+// EndsAtNotIn applies the NotIn predicate on the "ends_at" field.
+func EndsAtNotIn(vs ...time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNotIn(FieldEndsAt, vs...))
+}
+
+// EndsAtGT applies the GT predicate on the "ends_at" field.
+func EndsAtGT(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGT(FieldEndsAt, v))
+}
+
+// EndsAtGTE applies the GTE predicate on the "ends_at" field.
+func EndsAtGTE(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGTE(FieldEndsAt, v))
+}
+
+// EndsAtLT applies the LT predicate on the "ends_at" field.
+func EndsAtLT(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLT(FieldEndsAt, v))
+}
+
+// EndsAtLTE applies the LTE predicate on the "ends_at" field.
+func EndsAtLTE(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLTE(FieldEndsAt, v))
+}
+
+// EndsAtIsNil applies the IsNil predicate on the "ends_at" field.
+func EndsAtIsNil() predicate.Announcement {
+ return predicate.Announcement(sql.FieldIsNull(FieldEndsAt))
+}
+
+// EndsAtNotNil applies the NotNil predicate on the "ends_at" field.
+func EndsAtNotNil() predicate.Announcement {
+ return predicate.Announcement(sql.FieldNotNull(FieldEndsAt))
+}
+
+// CreatedByEQ applies the EQ predicate on the "created_by" field.
+func CreatedByEQ(v int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldCreatedBy, v))
+}
+
+// CreatedByNEQ applies the NEQ predicate on the "created_by" field.
+func CreatedByNEQ(v int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNEQ(FieldCreatedBy, v))
+}
+
+// CreatedByIn applies the In predicate on the "created_by" field.
+func CreatedByIn(vs ...int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldIn(FieldCreatedBy, vs...))
+}
+
+// CreatedByNotIn applies the NotIn predicate on the "created_by" field.
+func CreatedByNotIn(vs ...int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNotIn(FieldCreatedBy, vs...))
+}
+
+// CreatedByGT applies the GT predicate on the "created_by" field.
+func CreatedByGT(v int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGT(FieldCreatedBy, v))
+}
+
+// CreatedByGTE applies the GTE predicate on the "created_by" field.
+func CreatedByGTE(v int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGTE(FieldCreatedBy, v))
+}
+
+// CreatedByLT applies the LT predicate on the "created_by" field.
+func CreatedByLT(v int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLT(FieldCreatedBy, v))
+}
+
+// CreatedByLTE applies the LTE predicate on the "created_by" field.
+func CreatedByLTE(v int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLTE(FieldCreatedBy, v))
+}
+
+// CreatedByIsNil applies the IsNil predicate on the "created_by" field.
+func CreatedByIsNil() predicate.Announcement {
+ return predicate.Announcement(sql.FieldIsNull(FieldCreatedBy))
+}
+
+// CreatedByNotNil applies the NotNil predicate on the "created_by" field.
+func CreatedByNotNil() predicate.Announcement {
+ return predicate.Announcement(sql.FieldNotNull(FieldCreatedBy))
+}
+
+// UpdatedByEQ applies the EQ predicate on the "updated_by" field.
+func UpdatedByEQ(v int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldUpdatedBy, v))
+}
+
+// UpdatedByNEQ applies the NEQ predicate on the "updated_by" field.
+func UpdatedByNEQ(v int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNEQ(FieldUpdatedBy, v))
+}
+
+// UpdatedByIn applies the In predicate on the "updated_by" field.
+func UpdatedByIn(vs ...int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldIn(FieldUpdatedBy, vs...))
+}
+
+// UpdatedByNotIn applies the NotIn predicate on the "updated_by" field.
+func UpdatedByNotIn(vs ...int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNotIn(FieldUpdatedBy, vs...))
+}
+
+// UpdatedByGT applies the GT predicate on the "updated_by" field.
+func UpdatedByGT(v int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGT(FieldUpdatedBy, v))
+}
+
+// UpdatedByGTE applies the GTE predicate on the "updated_by" field.
+func UpdatedByGTE(v int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGTE(FieldUpdatedBy, v))
+}
+
+// UpdatedByLT applies the LT predicate on the "updated_by" field.
+func UpdatedByLT(v int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLT(FieldUpdatedBy, v))
+}
+
+// UpdatedByLTE applies the LTE predicate on the "updated_by" field.
+func UpdatedByLTE(v int64) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLTE(FieldUpdatedBy, v))
+}
+
+// UpdatedByIsNil applies the IsNil predicate on the "updated_by" field.
+func UpdatedByIsNil() predicate.Announcement {
+ return predicate.Announcement(sql.FieldIsNull(FieldUpdatedBy))
+}
+
+// UpdatedByNotNil applies the NotNil predicate on the "updated_by" field.
+func UpdatedByNotNil() predicate.Announcement {
+ return predicate.Announcement(sql.FieldNotNull(FieldUpdatedBy))
+}
+
+// CreatedAtEQ applies the EQ predicate on the "created_at" field.
+func CreatedAtEQ(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldCreatedAt, v))
+}
+
+// CreatedAtNEQ applies the NEQ predicate on the "created_at" field.
+func CreatedAtNEQ(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNEQ(FieldCreatedAt, v))
+}
+
+// CreatedAtIn applies the In predicate on the "created_at" field.
+func CreatedAtIn(vs ...time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldIn(FieldCreatedAt, vs...))
+}
+
+// CreatedAtNotIn applies the NotIn predicate on the "created_at" field.
+func CreatedAtNotIn(vs ...time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNotIn(FieldCreatedAt, vs...))
+}
+
+// CreatedAtGT applies the GT predicate on the "created_at" field.
+func CreatedAtGT(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGT(FieldCreatedAt, v))
+}
+
+// CreatedAtGTE applies the GTE predicate on the "created_at" field.
+func CreatedAtGTE(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGTE(FieldCreatedAt, v))
+}
+
+// CreatedAtLT applies the LT predicate on the "created_at" field.
+func CreatedAtLT(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLT(FieldCreatedAt, v))
+}
+
+// CreatedAtLTE applies the LTE predicate on the "created_at" field.
+func CreatedAtLTE(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLTE(FieldCreatedAt, v))
+}
+
+// UpdatedAtEQ applies the EQ predicate on the "updated_at" field.
+func UpdatedAtEQ(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldEQ(FieldUpdatedAt, v))
+}
+
+// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field.
+func UpdatedAtNEQ(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNEQ(FieldUpdatedAt, v))
+}
+
+// UpdatedAtIn applies the In predicate on the "updated_at" field.
+func UpdatedAtIn(vs ...time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldIn(FieldUpdatedAt, vs...))
+}
+
+// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field.
+func UpdatedAtNotIn(vs ...time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldNotIn(FieldUpdatedAt, vs...))
+}
+
+// UpdatedAtGT applies the GT predicate on the "updated_at" field.
+func UpdatedAtGT(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGT(FieldUpdatedAt, v))
+}
+
+// UpdatedAtGTE applies the GTE predicate on the "updated_at" field.
+func UpdatedAtGTE(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldGTE(FieldUpdatedAt, v))
+}
+
+// UpdatedAtLT applies the LT predicate on the "updated_at" field.
+func UpdatedAtLT(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLT(FieldUpdatedAt, v))
+}
+
+// UpdatedAtLTE applies the LTE predicate on the "updated_at" field.
+func UpdatedAtLTE(v time.Time) predicate.Announcement {
+ return predicate.Announcement(sql.FieldLTE(FieldUpdatedAt, v))
+}
+
+// HasReads applies the HasEdge predicate on the "reads" edge.
+func HasReads() predicate.Announcement {
+ return predicate.Announcement(func(s *sql.Selector) {
+ step := sqlgraph.NewStep(
+ sqlgraph.From(Table, FieldID),
+ sqlgraph.Edge(sqlgraph.O2M, false, ReadsTable, ReadsColumn),
+ )
+ sqlgraph.HasNeighbors(s, step)
+ })
+}
+
+// HasReadsWith applies the HasEdge predicate on the "reads" edge with a given conditions (other predicates).
+func HasReadsWith(preds ...predicate.AnnouncementRead) predicate.Announcement {
+ return predicate.Announcement(func(s *sql.Selector) {
+ step := newReadsStep()
+ sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
+ for _, p := range preds {
+ p(s)
+ }
+ })
+ })
+}
+
+// And groups predicates with the AND operator between them.
+func And(predicates ...predicate.Announcement) predicate.Announcement {
+ return predicate.Announcement(sql.AndPredicates(predicates...))
+}
+
+// Or groups predicates with the OR operator between them.
+func Or(predicates ...predicate.Announcement) predicate.Announcement {
+ return predicate.Announcement(sql.OrPredicates(predicates...))
+}
+
+// Not applies the not operator on the given predicate.
+func Not(p predicate.Announcement) predicate.Announcement {
+ return predicate.Announcement(sql.NotPredicates(p))
+}
diff --git a/backend/ent/announcement_create.go b/backend/ent/announcement_create.go
new file mode 100644
index 00000000..151d4c11
--- /dev/null
+++ b/backend/ent/announcement_create.go
@@ -0,0 +1,1159 @@
+// Code generated by ent, DO NOT EDIT.
+
+package ent
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "time"
+
+ "entgo.io/ent/dialect/sql"
+ "entgo.io/ent/dialect/sql/sqlgraph"
+ "entgo.io/ent/schema/field"
+ "github.com/Wei-Shaw/sub2api/ent/announcement"
+ "github.com/Wei-Shaw/sub2api/ent/announcementread"
+ "github.com/Wei-Shaw/sub2api/internal/domain"
+)
+
+// AnnouncementCreate is the builder for creating a Announcement entity.
+type AnnouncementCreate struct {
+ config
+ mutation *AnnouncementMutation
+ hooks []Hook
+ conflict []sql.ConflictOption
+}
+
+// SetTitle sets the "title" field.
+func (_c *AnnouncementCreate) SetTitle(v string) *AnnouncementCreate {
+ _c.mutation.SetTitle(v)
+ return _c
+}
+
+// SetContent sets the "content" field.
+func (_c *AnnouncementCreate) SetContent(v string) *AnnouncementCreate {
+ _c.mutation.SetContent(v)
+ return _c
+}
+
+// SetStatus sets the "status" field.
+func (_c *AnnouncementCreate) SetStatus(v string) *AnnouncementCreate {
+ _c.mutation.SetStatus(v)
+ return _c
+}
+
+// SetNillableStatus sets the "status" field if the given value is not nil.
+func (_c *AnnouncementCreate) SetNillableStatus(v *string) *AnnouncementCreate {
+ if v != nil {
+ _c.SetStatus(*v)
+ }
+ return _c
+}
+
+// SetTargeting sets the "targeting" field.
+func (_c *AnnouncementCreate) SetTargeting(v domain.AnnouncementTargeting) *AnnouncementCreate {
+ _c.mutation.SetTargeting(v)
+ return _c
+}
+
+// SetNillableTargeting sets the "targeting" field if the given value is not nil.
+func (_c *AnnouncementCreate) SetNillableTargeting(v *domain.AnnouncementTargeting) *AnnouncementCreate {
+ if v != nil {
+ _c.SetTargeting(*v)
+ }
+ return _c
+}
+
+// SetStartsAt sets the "starts_at" field.
+func (_c *AnnouncementCreate) SetStartsAt(v time.Time) *AnnouncementCreate {
+ _c.mutation.SetStartsAt(v)
+ return _c
+}
+
+// SetNillableStartsAt sets the "starts_at" field if the given value is not nil.
+func (_c *AnnouncementCreate) SetNillableStartsAt(v *time.Time) *AnnouncementCreate {
+ if v != nil {
+ _c.SetStartsAt(*v)
+ }
+ return _c
+}
+
+// SetEndsAt sets the "ends_at" field.
+func (_c *AnnouncementCreate) SetEndsAt(v time.Time) *AnnouncementCreate {
+ _c.mutation.SetEndsAt(v)
+ return _c
+}
+
+// SetNillableEndsAt sets the "ends_at" field if the given value is not nil.
+func (_c *AnnouncementCreate) SetNillableEndsAt(v *time.Time) *AnnouncementCreate {
+ if v != nil {
+ _c.SetEndsAt(*v)
+ }
+ return _c
+}
+
+// SetCreatedBy sets the "created_by" field.
+func (_c *AnnouncementCreate) SetCreatedBy(v int64) *AnnouncementCreate {
+ _c.mutation.SetCreatedBy(v)
+ return _c
+}
+
+// SetNillableCreatedBy sets the "created_by" field if the given value is not nil.
+func (_c *AnnouncementCreate) SetNillableCreatedBy(v *int64) *AnnouncementCreate {
+ if v != nil {
+ _c.SetCreatedBy(*v)
+ }
+ return _c
+}
+
+// SetUpdatedBy sets the "updated_by" field.
+func (_c *AnnouncementCreate) SetUpdatedBy(v int64) *AnnouncementCreate {
+ _c.mutation.SetUpdatedBy(v)
+ return _c
+}
+
+// SetNillableUpdatedBy sets the "updated_by" field if the given value is not nil.
+func (_c *AnnouncementCreate) SetNillableUpdatedBy(v *int64) *AnnouncementCreate {
+ if v != nil {
+ _c.SetUpdatedBy(*v)
+ }
+ return _c
+}
+
+// SetCreatedAt sets the "created_at" field.
+func (_c *AnnouncementCreate) SetCreatedAt(v time.Time) *AnnouncementCreate {
+ _c.mutation.SetCreatedAt(v)
+ return _c
+}
+
+// SetNillableCreatedAt sets the "created_at" field if the given value is not nil.
+func (_c *AnnouncementCreate) SetNillableCreatedAt(v *time.Time) *AnnouncementCreate {
+ if v != nil {
+ _c.SetCreatedAt(*v)
+ }
+ return _c
+}
+
+// SetUpdatedAt sets the "updated_at" field.
+func (_c *AnnouncementCreate) SetUpdatedAt(v time.Time) *AnnouncementCreate {
+ _c.mutation.SetUpdatedAt(v)
+ return _c
+}
+
+// SetNillableUpdatedAt sets the "updated_at" field if the given value is not nil.
+func (_c *AnnouncementCreate) SetNillableUpdatedAt(v *time.Time) *AnnouncementCreate {
+ if v != nil {
+ _c.SetUpdatedAt(*v)
+ }
+ return _c
+}
+
+// AddReadIDs adds the "reads" edge to the AnnouncementRead entity by IDs.
+func (_c *AnnouncementCreate) AddReadIDs(ids ...int64) *AnnouncementCreate {
+ _c.mutation.AddReadIDs(ids...)
+ return _c
+}
+
+// AddReads adds the "reads" edges to the AnnouncementRead entity.
+func (_c *AnnouncementCreate) AddReads(v ...*AnnouncementRead) *AnnouncementCreate {
+ ids := make([]int64, len(v))
+ for i := range v {
+ ids[i] = v[i].ID
+ }
+ return _c.AddReadIDs(ids...)
+}
+
+// Mutation returns the AnnouncementMutation object of the builder.
+func (_c *AnnouncementCreate) Mutation() *AnnouncementMutation {
+ return _c.mutation
+}
+
+// Save creates the Announcement in the database.
+func (_c *AnnouncementCreate) Save(ctx context.Context) (*Announcement, error) {
+ _c.defaults()
+ return withHooks(ctx, _c.sqlSave, _c.mutation, _c.hooks)
+}
+
+// SaveX calls Save and panics if Save returns an error.
+func (_c *AnnouncementCreate) SaveX(ctx context.Context) *Announcement {
+ v, err := _c.Save(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return v
+}
+
+// Exec executes the query.
+func (_c *AnnouncementCreate) Exec(ctx context.Context) error {
+ _, err := _c.Save(ctx)
+ return err
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (_c *AnnouncementCreate) ExecX(ctx context.Context) {
+ if err := _c.Exec(ctx); err != nil {
+ panic(err)
+ }
+}
+
+// defaults sets the default values of the builder before save.
+func (_c *AnnouncementCreate) defaults() {
+ if _, ok := _c.mutation.Status(); !ok {
+ v := announcement.DefaultStatus
+ _c.mutation.SetStatus(v)
+ }
+ if _, ok := _c.mutation.CreatedAt(); !ok {
+ v := announcement.DefaultCreatedAt()
+ _c.mutation.SetCreatedAt(v)
+ }
+ if _, ok := _c.mutation.UpdatedAt(); !ok {
+ v := announcement.DefaultUpdatedAt()
+ _c.mutation.SetUpdatedAt(v)
+ }
+}
+
+// check runs all checks and user-defined validators on the builder.
+func (_c *AnnouncementCreate) check() error {
+ if _, ok := _c.mutation.Title(); !ok {
+ return &ValidationError{Name: "title", err: errors.New(`ent: missing required field "Announcement.title"`)}
+ }
+ if v, ok := _c.mutation.Title(); ok {
+ if err := announcement.TitleValidator(v); err != nil {
+ return &ValidationError{Name: "title", err: fmt.Errorf(`ent: validator failed for field "Announcement.title": %w`, err)}
+ }
+ }
+ if _, ok := _c.mutation.Content(); !ok {
+ return &ValidationError{Name: "content", err: errors.New(`ent: missing required field "Announcement.content"`)}
+ }
+ if v, ok := _c.mutation.Content(); ok {
+ if err := announcement.ContentValidator(v); err != nil {
+ return &ValidationError{Name: "content", err: fmt.Errorf(`ent: validator failed for field "Announcement.content": %w`, err)}
+ }
+ }
+ if _, ok := _c.mutation.Status(); !ok {
+ return &ValidationError{Name: "status", err: errors.New(`ent: missing required field "Announcement.status"`)}
+ }
+ if v, ok := _c.mutation.Status(); ok {
+ if err := announcement.StatusValidator(v); err != nil {
+ return &ValidationError{Name: "status", err: fmt.Errorf(`ent: validator failed for field "Announcement.status": %w`, err)}
+ }
+ }
+ if _, ok := _c.mutation.CreatedAt(); !ok {
+ return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "Announcement.created_at"`)}
+ }
+ if _, ok := _c.mutation.UpdatedAt(); !ok {
+ return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "Announcement.updated_at"`)}
+ }
+ return nil
+}
+
+func (_c *AnnouncementCreate) sqlSave(ctx context.Context) (*Announcement, error) {
+ if err := _c.check(); err != nil {
+ return nil, err
+ }
+ _node, _spec := _c.createSpec()
+ if err := sqlgraph.CreateNode(ctx, _c.driver, _spec); err != nil {
+ if sqlgraph.IsConstraintError(err) {
+ err = &ConstraintError{msg: err.Error(), wrap: err}
+ }
+ return nil, err
+ }
+ id := _spec.ID.Value.(int64)
+ _node.ID = int64(id)
+ _c.mutation.id = &_node.ID
+ _c.mutation.done = true
+ return _node, nil
+}
+
+func (_c *AnnouncementCreate) createSpec() (*Announcement, *sqlgraph.CreateSpec) {
+ var (
+ _node = &Announcement{config: _c.config}
+ _spec = sqlgraph.NewCreateSpec(announcement.Table, sqlgraph.NewFieldSpec(announcement.FieldID, field.TypeInt64))
+ )
+ _spec.OnConflict = _c.conflict
+ if value, ok := _c.mutation.Title(); ok {
+ _spec.SetField(announcement.FieldTitle, field.TypeString, value)
+ _node.Title = value
+ }
+ if value, ok := _c.mutation.Content(); ok {
+ _spec.SetField(announcement.FieldContent, field.TypeString, value)
+ _node.Content = value
+ }
+ if value, ok := _c.mutation.Status(); ok {
+ _spec.SetField(announcement.FieldStatus, field.TypeString, value)
+ _node.Status = value
+ }
+ if value, ok := _c.mutation.Targeting(); ok {
+ _spec.SetField(announcement.FieldTargeting, field.TypeJSON, value)
+ _node.Targeting = value
+ }
+ if value, ok := _c.mutation.StartsAt(); ok {
+ _spec.SetField(announcement.FieldStartsAt, field.TypeTime, value)
+ _node.StartsAt = &value
+ }
+ if value, ok := _c.mutation.EndsAt(); ok {
+ _spec.SetField(announcement.FieldEndsAt, field.TypeTime, value)
+ _node.EndsAt = &value
+ }
+ if value, ok := _c.mutation.CreatedBy(); ok {
+ _spec.SetField(announcement.FieldCreatedBy, field.TypeInt64, value)
+ _node.CreatedBy = &value
+ }
+ if value, ok := _c.mutation.UpdatedBy(); ok {
+ _spec.SetField(announcement.FieldUpdatedBy, field.TypeInt64, value)
+ _node.UpdatedBy = &value
+ }
+ if value, ok := _c.mutation.CreatedAt(); ok {
+ _spec.SetField(announcement.FieldCreatedAt, field.TypeTime, value)
+ _node.CreatedAt = value
+ }
+ if value, ok := _c.mutation.UpdatedAt(); ok {
+ _spec.SetField(announcement.FieldUpdatedAt, field.TypeTime, value)
+ _node.UpdatedAt = value
+ }
+ if nodes := _c.mutation.ReadsIDs(); len(nodes) > 0 {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.O2M,
+ Inverse: false,
+ Table: announcement.ReadsTable,
+ Columns: []string{announcement.ReadsColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64),
+ },
+ }
+ for _, k := range nodes {
+ edge.Target.Nodes = append(edge.Target.Nodes, k)
+ }
+ _spec.Edges = append(_spec.Edges, edge)
+ }
+ return _node, _spec
+}
+
+// OnConflict allows configuring the `ON CONFLICT` / `ON DUPLICATE KEY` clause
+// of the `INSERT` statement. For example:
+//
+// client.Announcement.Create().
+// SetTitle(v).
+// OnConflict(
+// // Update the row with the new values
+// // that was proposed for insertion.
+// sql.ResolveWithNewValues(),
+// ).
+// // Override some of the fields with custom
+// // update values.
+// Update(func(u *ent.AnnouncementUpsert) {
+// SetTitle(v+v).
+// }).
+// Exec(ctx)
+func (_c *AnnouncementCreate) OnConflict(opts ...sql.ConflictOption) *AnnouncementUpsertOne {
+ _c.conflict = opts
+ return &AnnouncementUpsertOne{
+ create: _c,
+ }
+}
+
+// OnConflictColumns calls `OnConflict` and configures the columns
+// as conflict target. Using this option is equivalent to using:
+//
+// client.Announcement.Create().
+// OnConflict(sql.ConflictColumns(columns...)).
+// Exec(ctx)
+func (_c *AnnouncementCreate) OnConflictColumns(columns ...string) *AnnouncementUpsertOne {
+ _c.conflict = append(_c.conflict, sql.ConflictColumns(columns...))
+ return &AnnouncementUpsertOne{
+ create: _c,
+ }
+}
+
+type (
+ // AnnouncementUpsertOne is the builder for "upsert"-ing
+ // one Announcement node.
+ AnnouncementUpsertOne struct {
+ create *AnnouncementCreate
+ }
+
+ // AnnouncementUpsert is the "OnConflict" setter.
+ AnnouncementUpsert struct {
+ *sql.UpdateSet
+ }
+)
+
+// SetTitle sets the "title" field.
+func (u *AnnouncementUpsert) SetTitle(v string) *AnnouncementUpsert {
+ u.Set(announcement.FieldTitle, v)
+ return u
+}
+
+// UpdateTitle sets the "title" field to the value that was provided on create.
+func (u *AnnouncementUpsert) UpdateTitle() *AnnouncementUpsert {
+ u.SetExcluded(announcement.FieldTitle)
+ return u
+}
+
+// SetContent sets the "content" field.
+func (u *AnnouncementUpsert) SetContent(v string) *AnnouncementUpsert {
+ u.Set(announcement.FieldContent, v)
+ return u
+}
+
+// UpdateContent sets the "content" field to the value that was provided on create.
+func (u *AnnouncementUpsert) UpdateContent() *AnnouncementUpsert {
+ u.SetExcluded(announcement.FieldContent)
+ return u
+}
+
+// SetStatus sets the "status" field.
+func (u *AnnouncementUpsert) SetStatus(v string) *AnnouncementUpsert {
+ u.Set(announcement.FieldStatus, v)
+ return u
+}
+
+// UpdateStatus sets the "status" field to the value that was provided on create.
+func (u *AnnouncementUpsert) UpdateStatus() *AnnouncementUpsert {
+ u.SetExcluded(announcement.FieldStatus)
+ return u
+}
+
+// SetTargeting sets the "targeting" field.
+func (u *AnnouncementUpsert) SetTargeting(v domain.AnnouncementTargeting) *AnnouncementUpsert {
+ u.Set(announcement.FieldTargeting, v)
+ return u
+}
+
+// UpdateTargeting sets the "targeting" field to the value that was provided on create.
+func (u *AnnouncementUpsert) UpdateTargeting() *AnnouncementUpsert {
+ u.SetExcluded(announcement.FieldTargeting)
+ return u
+}
+
+// ClearTargeting clears the value of the "targeting" field.
+func (u *AnnouncementUpsert) ClearTargeting() *AnnouncementUpsert {
+ u.SetNull(announcement.FieldTargeting)
+ return u
+}
+
+// SetStartsAt sets the "starts_at" field.
+func (u *AnnouncementUpsert) SetStartsAt(v time.Time) *AnnouncementUpsert {
+ u.Set(announcement.FieldStartsAt, v)
+ return u
+}
+
+// UpdateStartsAt sets the "starts_at" field to the value that was provided on create.
+func (u *AnnouncementUpsert) UpdateStartsAt() *AnnouncementUpsert {
+ u.SetExcluded(announcement.FieldStartsAt)
+ return u
+}
+
+// ClearStartsAt clears the value of the "starts_at" field.
+func (u *AnnouncementUpsert) ClearStartsAt() *AnnouncementUpsert {
+ u.SetNull(announcement.FieldStartsAt)
+ return u
+}
+
+// SetEndsAt sets the "ends_at" field.
+func (u *AnnouncementUpsert) SetEndsAt(v time.Time) *AnnouncementUpsert {
+ u.Set(announcement.FieldEndsAt, v)
+ return u
+}
+
+// UpdateEndsAt sets the "ends_at" field to the value that was provided on create.
+func (u *AnnouncementUpsert) UpdateEndsAt() *AnnouncementUpsert {
+ u.SetExcluded(announcement.FieldEndsAt)
+ return u
+}
+
+// ClearEndsAt clears the value of the "ends_at" field.
+func (u *AnnouncementUpsert) ClearEndsAt() *AnnouncementUpsert {
+ u.SetNull(announcement.FieldEndsAt)
+ return u
+}
+
+// SetCreatedBy sets the "created_by" field.
+func (u *AnnouncementUpsert) SetCreatedBy(v int64) *AnnouncementUpsert {
+ u.Set(announcement.FieldCreatedBy, v)
+ return u
+}
+
+// UpdateCreatedBy sets the "created_by" field to the value that was provided on create.
+func (u *AnnouncementUpsert) UpdateCreatedBy() *AnnouncementUpsert {
+ u.SetExcluded(announcement.FieldCreatedBy)
+ return u
+}
+
+// AddCreatedBy adds v to the "created_by" field.
+func (u *AnnouncementUpsert) AddCreatedBy(v int64) *AnnouncementUpsert {
+ u.Add(announcement.FieldCreatedBy, v)
+ return u
+}
+
+// ClearCreatedBy clears the value of the "created_by" field.
+func (u *AnnouncementUpsert) ClearCreatedBy() *AnnouncementUpsert {
+ u.SetNull(announcement.FieldCreatedBy)
+ return u
+}
+
+// SetUpdatedBy sets the "updated_by" field.
+func (u *AnnouncementUpsert) SetUpdatedBy(v int64) *AnnouncementUpsert {
+ u.Set(announcement.FieldUpdatedBy, v)
+ return u
+}
+
+// UpdateUpdatedBy sets the "updated_by" field to the value that was provided on create.
+func (u *AnnouncementUpsert) UpdateUpdatedBy() *AnnouncementUpsert {
+ u.SetExcluded(announcement.FieldUpdatedBy)
+ return u
+}
+
+// AddUpdatedBy adds v to the "updated_by" field.
+func (u *AnnouncementUpsert) AddUpdatedBy(v int64) *AnnouncementUpsert {
+ u.Add(announcement.FieldUpdatedBy, v)
+ return u
+}
+
+// ClearUpdatedBy clears the value of the "updated_by" field.
+func (u *AnnouncementUpsert) ClearUpdatedBy() *AnnouncementUpsert {
+ u.SetNull(announcement.FieldUpdatedBy)
+ return u
+}
+
+// SetUpdatedAt sets the "updated_at" field.
+func (u *AnnouncementUpsert) SetUpdatedAt(v time.Time) *AnnouncementUpsert {
+ u.Set(announcement.FieldUpdatedAt, v)
+ return u
+}
+
+// UpdateUpdatedAt sets the "updated_at" field to the value that was provided on create.
+func (u *AnnouncementUpsert) UpdateUpdatedAt() *AnnouncementUpsert {
+ u.SetExcluded(announcement.FieldUpdatedAt)
+ return u
+}
+
+// UpdateNewValues updates the mutable fields using the new values that were set on create.
+// Using this option is equivalent to using:
+//
+// client.Announcement.Create().
+// OnConflict(
+// sql.ResolveWithNewValues(),
+// ).
+// Exec(ctx)
+func (u *AnnouncementUpsertOne) UpdateNewValues() *AnnouncementUpsertOne {
+ u.create.conflict = append(u.create.conflict, sql.ResolveWithNewValues())
+ u.create.conflict = append(u.create.conflict, sql.ResolveWith(func(s *sql.UpdateSet) {
+ if _, exists := u.create.mutation.CreatedAt(); exists {
+ s.SetIgnore(announcement.FieldCreatedAt)
+ }
+ }))
+ return u
+}
+
+// Ignore sets each column to itself in case of conflict.
+// Using this option is equivalent to using:
+//
+// client.Announcement.Create().
+// OnConflict(sql.ResolveWithIgnore()).
+// Exec(ctx)
+func (u *AnnouncementUpsertOne) Ignore() *AnnouncementUpsertOne {
+ u.create.conflict = append(u.create.conflict, sql.ResolveWithIgnore())
+ return u
+}
+
+// DoNothing configures the conflict_action to `DO NOTHING`.
+// Supported only by SQLite and PostgreSQL.
+func (u *AnnouncementUpsertOne) DoNothing() *AnnouncementUpsertOne {
+ u.create.conflict = append(u.create.conflict, sql.DoNothing())
+ return u
+}
+
+// Update allows overriding fields `UPDATE` values. See the AnnouncementCreate.OnConflict
+// documentation for more info.
+func (u *AnnouncementUpsertOne) Update(set func(*AnnouncementUpsert)) *AnnouncementUpsertOne {
+ u.create.conflict = append(u.create.conflict, sql.ResolveWith(func(update *sql.UpdateSet) {
+ set(&AnnouncementUpsert{UpdateSet: update})
+ }))
+ return u
+}
+
+// SetTitle sets the "title" field.
+func (u *AnnouncementUpsertOne) SetTitle(v string) *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.SetTitle(v)
+ })
+}
+
+// UpdateTitle sets the "title" field to the value that was provided on create.
+func (u *AnnouncementUpsertOne) UpdateTitle() *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.UpdateTitle()
+ })
+}
+
+// SetContent sets the "content" field.
+func (u *AnnouncementUpsertOne) SetContent(v string) *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.SetContent(v)
+ })
+}
+
+// UpdateContent sets the "content" field to the value that was provided on create.
+func (u *AnnouncementUpsertOne) UpdateContent() *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.UpdateContent()
+ })
+}
+
+// SetStatus sets the "status" field.
+func (u *AnnouncementUpsertOne) SetStatus(v string) *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.SetStatus(v)
+ })
+}
+
+// UpdateStatus sets the "status" field to the value that was provided on create.
+func (u *AnnouncementUpsertOne) UpdateStatus() *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.UpdateStatus()
+ })
+}
+
+// SetTargeting sets the "targeting" field.
+func (u *AnnouncementUpsertOne) SetTargeting(v domain.AnnouncementTargeting) *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.SetTargeting(v)
+ })
+}
+
+// UpdateTargeting sets the "targeting" field to the value that was provided on create.
+func (u *AnnouncementUpsertOne) UpdateTargeting() *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.UpdateTargeting()
+ })
+}
+
+// ClearTargeting clears the value of the "targeting" field.
+func (u *AnnouncementUpsertOne) ClearTargeting() *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.ClearTargeting()
+ })
+}
+
+// SetStartsAt sets the "starts_at" field.
+func (u *AnnouncementUpsertOne) SetStartsAt(v time.Time) *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.SetStartsAt(v)
+ })
+}
+
+// UpdateStartsAt sets the "starts_at" field to the value that was provided on create.
+func (u *AnnouncementUpsertOne) UpdateStartsAt() *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.UpdateStartsAt()
+ })
+}
+
+// ClearStartsAt clears the value of the "starts_at" field.
+func (u *AnnouncementUpsertOne) ClearStartsAt() *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.ClearStartsAt()
+ })
+}
+
+// SetEndsAt sets the "ends_at" field.
+func (u *AnnouncementUpsertOne) SetEndsAt(v time.Time) *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.SetEndsAt(v)
+ })
+}
+
+// UpdateEndsAt sets the "ends_at" field to the value that was provided on create.
+func (u *AnnouncementUpsertOne) UpdateEndsAt() *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.UpdateEndsAt()
+ })
+}
+
+// ClearEndsAt clears the value of the "ends_at" field.
+func (u *AnnouncementUpsertOne) ClearEndsAt() *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.ClearEndsAt()
+ })
+}
+
+// SetCreatedBy sets the "created_by" field.
+func (u *AnnouncementUpsertOne) SetCreatedBy(v int64) *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.SetCreatedBy(v)
+ })
+}
+
+// AddCreatedBy adds v to the "created_by" field.
+func (u *AnnouncementUpsertOne) AddCreatedBy(v int64) *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.AddCreatedBy(v)
+ })
+}
+
+// UpdateCreatedBy sets the "created_by" field to the value that was provided on create.
+func (u *AnnouncementUpsertOne) UpdateCreatedBy() *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.UpdateCreatedBy()
+ })
+}
+
+// ClearCreatedBy clears the value of the "created_by" field.
+func (u *AnnouncementUpsertOne) ClearCreatedBy() *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.ClearCreatedBy()
+ })
+}
+
+// SetUpdatedBy sets the "updated_by" field.
+func (u *AnnouncementUpsertOne) SetUpdatedBy(v int64) *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.SetUpdatedBy(v)
+ })
+}
+
+// AddUpdatedBy adds v to the "updated_by" field.
+func (u *AnnouncementUpsertOne) AddUpdatedBy(v int64) *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.AddUpdatedBy(v)
+ })
+}
+
+// UpdateUpdatedBy sets the "updated_by" field to the value that was provided on create.
+func (u *AnnouncementUpsertOne) UpdateUpdatedBy() *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.UpdateUpdatedBy()
+ })
+}
+
+// ClearUpdatedBy clears the value of the "updated_by" field.
+func (u *AnnouncementUpsertOne) ClearUpdatedBy() *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.ClearUpdatedBy()
+ })
+}
+
+// SetUpdatedAt sets the "updated_at" field.
+func (u *AnnouncementUpsertOne) SetUpdatedAt(v time.Time) *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.SetUpdatedAt(v)
+ })
+}
+
+// UpdateUpdatedAt sets the "updated_at" field to the value that was provided on create.
+func (u *AnnouncementUpsertOne) UpdateUpdatedAt() *AnnouncementUpsertOne {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.UpdateUpdatedAt()
+ })
+}
+
+// Exec executes the query.
+func (u *AnnouncementUpsertOne) Exec(ctx context.Context) error {
+ if len(u.create.conflict) == 0 {
+ return errors.New("ent: missing options for AnnouncementCreate.OnConflict")
+ }
+ return u.create.Exec(ctx)
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (u *AnnouncementUpsertOne) ExecX(ctx context.Context) {
+ if err := u.create.Exec(ctx); err != nil {
+ panic(err)
+ }
+}
+
+// ID executes the UPSERT query and returns the inserted/updated ID.
+func (u *AnnouncementUpsertOne) ID(ctx context.Context) (id int64, err error) {
+ node, err := u.create.Save(ctx)
+ if err != nil {
+ return id, err
+ }
+ return node.ID, nil
+}
+
+// IDX is like ID, but panics if an error occurs.
+func (u *AnnouncementUpsertOne) IDX(ctx context.Context) int64 {
+ id, err := u.ID(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return id
+}
+
+// AnnouncementCreateBulk is the builder for creating many Announcement entities in bulk.
+type AnnouncementCreateBulk struct {
+ config
+ err error
+ builders []*AnnouncementCreate
+ conflict []sql.ConflictOption
+}
+
+// Save creates the Announcement entities in the database.
+func (_c *AnnouncementCreateBulk) Save(ctx context.Context) ([]*Announcement, error) {
+ if _c.err != nil {
+ return nil, _c.err
+ }
+ specs := make([]*sqlgraph.CreateSpec, len(_c.builders))
+ nodes := make([]*Announcement, len(_c.builders))
+ mutators := make([]Mutator, len(_c.builders))
+ for i := range _c.builders {
+ func(i int, root context.Context) {
+ builder := _c.builders[i]
+ builder.defaults()
+ var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
+ mutation, ok := m.(*AnnouncementMutation)
+ if !ok {
+ return nil, fmt.Errorf("unexpected mutation type %T", m)
+ }
+ if err := builder.check(); err != nil {
+ return nil, err
+ }
+ builder.mutation = mutation
+ var err error
+ nodes[i], specs[i] = builder.createSpec()
+ if i < len(mutators)-1 {
+ _, err = mutators[i+1].Mutate(root, _c.builders[i+1].mutation)
+ } else {
+ spec := &sqlgraph.BatchCreateSpec{Nodes: specs}
+ spec.OnConflict = _c.conflict
+ // Invoke the actual operation on the latest mutation in the chain.
+ if err = sqlgraph.BatchCreate(ctx, _c.driver, spec); err != nil {
+ if sqlgraph.IsConstraintError(err) {
+ err = &ConstraintError{msg: err.Error(), wrap: err}
+ }
+ }
+ }
+ if err != nil {
+ return nil, err
+ }
+ mutation.id = &nodes[i].ID
+ if specs[i].ID.Value != nil {
+ id := specs[i].ID.Value.(int64)
+ nodes[i].ID = int64(id)
+ }
+ mutation.done = true
+ return nodes[i], nil
+ })
+ for i := len(builder.hooks) - 1; i >= 0; i-- {
+ mut = builder.hooks[i](mut)
+ }
+ mutators[i] = mut
+ }(i, ctx)
+ }
+ if len(mutators) > 0 {
+ if _, err := mutators[0].Mutate(ctx, _c.builders[0].mutation); err != nil {
+ return nil, err
+ }
+ }
+ return nodes, nil
+}
+
+// SaveX is like Save, but panics if an error occurs.
+func (_c *AnnouncementCreateBulk) SaveX(ctx context.Context) []*Announcement {
+ v, err := _c.Save(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return v
+}
+
+// Exec executes the query.
+func (_c *AnnouncementCreateBulk) Exec(ctx context.Context) error {
+ _, err := _c.Save(ctx)
+ return err
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (_c *AnnouncementCreateBulk) ExecX(ctx context.Context) {
+ if err := _c.Exec(ctx); err != nil {
+ panic(err)
+ }
+}
+
+// OnConflict allows configuring the `ON CONFLICT` / `ON DUPLICATE KEY` clause
+// of the `INSERT` statement. For example:
+//
+// client.Announcement.CreateBulk(builders...).
+// OnConflict(
+// // Update the row with the new values
+// // that was proposed for insertion.
+// sql.ResolveWithNewValues(),
+// ).
+// // Override some of the fields with custom
+// // update values.
+// Update(func(u *ent.AnnouncementUpsert) {
+// SetTitle(v+v).
+// }).
+// Exec(ctx)
+func (_c *AnnouncementCreateBulk) OnConflict(opts ...sql.ConflictOption) *AnnouncementUpsertBulk {
+ _c.conflict = opts
+ return &AnnouncementUpsertBulk{
+ create: _c,
+ }
+}
+
+// OnConflictColumns calls `OnConflict` and configures the columns
+// as conflict target. Using this option is equivalent to using:
+//
+// client.Announcement.CreateBulk(builders...).
+// OnConflict(sql.ConflictColumns(columns...)).
+// Exec(ctx)
+func (_c *AnnouncementCreateBulk) OnConflictColumns(columns ...string) *AnnouncementUpsertBulk {
+ _c.conflict = append(_c.conflict, sql.ConflictColumns(columns...))
+ return &AnnouncementUpsertBulk{
+ create: _c,
+ }
+}
+
+// AnnouncementUpsertBulk is the builder for "upsert"-ing
+// a bulk of Announcement nodes.
+type AnnouncementUpsertBulk struct {
+ create *AnnouncementCreateBulk
+}
+
+// UpdateNewValues updates the mutable fields using the new values that
+// were set on create. Using this option is equivalent to using:
+//
+// client.Announcement.Create().
+// OnConflict(
+// sql.ResolveWithNewValues(),
+// ).
+// Exec(ctx)
+func (u *AnnouncementUpsertBulk) UpdateNewValues() *AnnouncementUpsertBulk {
+ u.create.conflict = append(u.create.conflict, sql.ResolveWithNewValues())
+ u.create.conflict = append(u.create.conflict, sql.ResolveWith(func(s *sql.UpdateSet) {
+ for _, b := range u.create.builders {
+ if _, exists := b.mutation.CreatedAt(); exists {
+ s.SetIgnore(announcement.FieldCreatedAt)
+ }
+ }
+ }))
+ return u
+}
+
+// Ignore sets each column to itself in case of conflict.
+// Using this option is equivalent to using:
+//
+// client.Announcement.Create().
+// OnConflict(sql.ResolveWithIgnore()).
+// Exec(ctx)
+func (u *AnnouncementUpsertBulk) Ignore() *AnnouncementUpsertBulk {
+ u.create.conflict = append(u.create.conflict, sql.ResolveWithIgnore())
+ return u
+}
+
+// DoNothing configures the conflict_action to `DO NOTHING`.
+// Supported only by SQLite and PostgreSQL.
+func (u *AnnouncementUpsertBulk) DoNothing() *AnnouncementUpsertBulk {
+ u.create.conflict = append(u.create.conflict, sql.DoNothing())
+ return u
+}
+
+// Update allows overriding fields `UPDATE` values. See the AnnouncementCreateBulk.OnConflict
+// documentation for more info.
+func (u *AnnouncementUpsertBulk) Update(set func(*AnnouncementUpsert)) *AnnouncementUpsertBulk {
+ u.create.conflict = append(u.create.conflict, sql.ResolveWith(func(update *sql.UpdateSet) {
+ set(&AnnouncementUpsert{UpdateSet: update})
+ }))
+ return u
+}
+
+// SetTitle sets the "title" field.
+func (u *AnnouncementUpsertBulk) SetTitle(v string) *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.SetTitle(v)
+ })
+}
+
+// UpdateTitle sets the "title" field to the value that was provided on create.
+func (u *AnnouncementUpsertBulk) UpdateTitle() *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.UpdateTitle()
+ })
+}
+
+// SetContent sets the "content" field.
+func (u *AnnouncementUpsertBulk) SetContent(v string) *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.SetContent(v)
+ })
+}
+
+// UpdateContent sets the "content" field to the value that was provided on create.
+func (u *AnnouncementUpsertBulk) UpdateContent() *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.UpdateContent()
+ })
+}
+
+// SetStatus sets the "status" field.
+func (u *AnnouncementUpsertBulk) SetStatus(v string) *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.SetStatus(v)
+ })
+}
+
+// UpdateStatus sets the "status" field to the value that was provided on create.
+func (u *AnnouncementUpsertBulk) UpdateStatus() *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.UpdateStatus()
+ })
+}
+
+// SetTargeting sets the "targeting" field.
+func (u *AnnouncementUpsertBulk) SetTargeting(v domain.AnnouncementTargeting) *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.SetTargeting(v)
+ })
+}
+
+// UpdateTargeting sets the "targeting" field to the value that was provided on create.
+func (u *AnnouncementUpsertBulk) UpdateTargeting() *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.UpdateTargeting()
+ })
+}
+
+// ClearTargeting clears the value of the "targeting" field.
+func (u *AnnouncementUpsertBulk) ClearTargeting() *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.ClearTargeting()
+ })
+}
+
+// SetStartsAt sets the "starts_at" field.
+func (u *AnnouncementUpsertBulk) SetStartsAt(v time.Time) *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.SetStartsAt(v)
+ })
+}
+
+// UpdateStartsAt sets the "starts_at" field to the value that was provided on create.
+func (u *AnnouncementUpsertBulk) UpdateStartsAt() *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.UpdateStartsAt()
+ })
+}
+
+// ClearStartsAt clears the value of the "starts_at" field.
+func (u *AnnouncementUpsertBulk) ClearStartsAt() *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.ClearStartsAt()
+ })
+}
+
+// SetEndsAt sets the "ends_at" field.
+func (u *AnnouncementUpsertBulk) SetEndsAt(v time.Time) *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.SetEndsAt(v)
+ })
+}
+
+// UpdateEndsAt sets the "ends_at" field to the value that was provided on create.
+func (u *AnnouncementUpsertBulk) UpdateEndsAt() *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.UpdateEndsAt()
+ })
+}
+
+// ClearEndsAt clears the value of the "ends_at" field.
+func (u *AnnouncementUpsertBulk) ClearEndsAt() *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.ClearEndsAt()
+ })
+}
+
+// SetCreatedBy sets the "created_by" field.
+func (u *AnnouncementUpsertBulk) SetCreatedBy(v int64) *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.SetCreatedBy(v)
+ })
+}
+
+// AddCreatedBy adds v to the "created_by" field.
+func (u *AnnouncementUpsertBulk) AddCreatedBy(v int64) *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.AddCreatedBy(v)
+ })
+}
+
+// UpdateCreatedBy sets the "created_by" field to the value that was provided on create.
+func (u *AnnouncementUpsertBulk) UpdateCreatedBy() *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.UpdateCreatedBy()
+ })
+}
+
+// ClearCreatedBy clears the value of the "created_by" field.
+func (u *AnnouncementUpsertBulk) ClearCreatedBy() *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.ClearCreatedBy()
+ })
+}
+
+// SetUpdatedBy sets the "updated_by" field.
+func (u *AnnouncementUpsertBulk) SetUpdatedBy(v int64) *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.SetUpdatedBy(v)
+ })
+}
+
+// AddUpdatedBy adds v to the "updated_by" field.
+func (u *AnnouncementUpsertBulk) AddUpdatedBy(v int64) *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.AddUpdatedBy(v)
+ })
+}
+
+// UpdateUpdatedBy sets the "updated_by" field to the value that was provided on create.
+func (u *AnnouncementUpsertBulk) UpdateUpdatedBy() *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.UpdateUpdatedBy()
+ })
+}
+
+// ClearUpdatedBy clears the value of the "updated_by" field.
+func (u *AnnouncementUpsertBulk) ClearUpdatedBy() *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.ClearUpdatedBy()
+ })
+}
+
+// SetUpdatedAt sets the "updated_at" field.
+func (u *AnnouncementUpsertBulk) SetUpdatedAt(v time.Time) *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.SetUpdatedAt(v)
+ })
+}
+
+// UpdateUpdatedAt sets the "updated_at" field to the value that was provided on create.
+func (u *AnnouncementUpsertBulk) UpdateUpdatedAt() *AnnouncementUpsertBulk {
+ return u.Update(func(s *AnnouncementUpsert) {
+ s.UpdateUpdatedAt()
+ })
+}
+
+// Exec executes the query.
+func (u *AnnouncementUpsertBulk) Exec(ctx context.Context) error {
+ if u.create.err != nil {
+ return u.create.err
+ }
+ for i, b := range u.create.builders {
+ if len(b.conflict) != 0 {
+ return fmt.Errorf("ent: OnConflict was set for builder %d. Set it on the AnnouncementCreateBulk instead", i)
+ }
+ }
+ if len(u.create.conflict) == 0 {
+ return errors.New("ent: missing options for AnnouncementCreateBulk.OnConflict")
+ }
+ return u.create.Exec(ctx)
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (u *AnnouncementUpsertBulk) ExecX(ctx context.Context) {
+ if err := u.create.Exec(ctx); err != nil {
+ panic(err)
+ }
+}
diff --git a/backend/ent/announcement_delete.go b/backend/ent/announcement_delete.go
new file mode 100644
index 00000000..d185e9f7
--- /dev/null
+++ b/backend/ent/announcement_delete.go
@@ -0,0 +1,88 @@
+// Code generated by ent, DO NOT EDIT.
+
+package ent
+
+import (
+ "context"
+
+ "entgo.io/ent/dialect/sql"
+ "entgo.io/ent/dialect/sql/sqlgraph"
+ "entgo.io/ent/schema/field"
+ "github.com/Wei-Shaw/sub2api/ent/announcement"
+ "github.com/Wei-Shaw/sub2api/ent/predicate"
+)
+
+// AnnouncementDelete is the builder for deleting an Announcement entity.
+type AnnouncementDelete struct {
+ config
+ hooks []Hook
+ mutation *AnnouncementMutation
+}
+
+// Where appends a list of predicates to the AnnouncementDelete builder.
+func (_d *AnnouncementDelete) Where(ps ...predicate.Announcement) *AnnouncementDelete {
+ _d.mutation.Where(ps...)
+ return _d
+}
+
+// Exec executes the deletion query and returns how many vertices were deleted.
+func (_d *AnnouncementDelete) Exec(ctx context.Context) (int, error) {
+ return withHooks(ctx, _d.sqlExec, _d.mutation, _d.hooks)
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (_d *AnnouncementDelete) ExecX(ctx context.Context) int {
+ n, err := _d.Exec(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return n
+}
+
+func (_d *AnnouncementDelete) sqlExec(ctx context.Context) (int, error) {
+ _spec := sqlgraph.NewDeleteSpec(announcement.Table, sqlgraph.NewFieldSpec(announcement.FieldID, field.TypeInt64))
+ if ps := _d.mutation.predicates; len(ps) > 0 {
+ _spec.Predicate = func(selector *sql.Selector) {
+ for i := range ps {
+ ps[i](selector)
+ }
+ }
+ }
+ affected, err := sqlgraph.DeleteNodes(ctx, _d.driver, _spec)
+ if err != nil && sqlgraph.IsConstraintError(err) {
+ err = &ConstraintError{msg: err.Error(), wrap: err}
+ }
+ _d.mutation.done = true
+ return affected, err
+}
+
+// AnnouncementDeleteOne is the builder for deleting a single Announcement entity.
+type AnnouncementDeleteOne struct {
+ _d *AnnouncementDelete
+}
+
+// Where appends a list of predicates to the AnnouncementDelete builder.
+func (_d *AnnouncementDeleteOne) Where(ps ...predicate.Announcement) *AnnouncementDeleteOne {
+ _d._d.mutation.Where(ps...)
+ return _d
+}
+
+// Exec executes the deletion query.
+func (_d *AnnouncementDeleteOne) Exec(ctx context.Context) error {
+ n, err := _d._d.Exec(ctx)
+ switch {
+ case err != nil:
+ return err
+ case n == 0:
+ return &NotFoundError{announcement.Label}
+ default:
+ return nil
+ }
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (_d *AnnouncementDeleteOne) ExecX(ctx context.Context) {
+ if err := _d.Exec(ctx); err != nil {
+ panic(err)
+ }
+}
diff --git a/backend/ent/announcement_query.go b/backend/ent/announcement_query.go
new file mode 100644
index 00000000..a27d50fa
--- /dev/null
+++ b/backend/ent/announcement_query.go
@@ -0,0 +1,643 @@
+// Code generated by ent, DO NOT EDIT.
+
+package ent
+
+import (
+ "context"
+ "database/sql/driver"
+ "fmt"
+ "math"
+
+ "entgo.io/ent"
+ "entgo.io/ent/dialect"
+ "entgo.io/ent/dialect/sql"
+ "entgo.io/ent/dialect/sql/sqlgraph"
+ "entgo.io/ent/schema/field"
+ "github.com/Wei-Shaw/sub2api/ent/announcement"
+ "github.com/Wei-Shaw/sub2api/ent/announcementread"
+ "github.com/Wei-Shaw/sub2api/ent/predicate"
+)
+
+// AnnouncementQuery is the builder for querying Announcement entities.
+type AnnouncementQuery struct {
+ config
+ ctx *QueryContext
+ order []announcement.OrderOption
+ inters []Interceptor
+ predicates []predicate.Announcement
+ withReads *AnnouncementReadQuery
+ modifiers []func(*sql.Selector)
+ // intermediate query (i.e. traversal path).
+ sql *sql.Selector
+ path func(context.Context) (*sql.Selector, error)
+}
+
+// Where adds a new predicate for the AnnouncementQuery builder.
+func (_q *AnnouncementQuery) Where(ps ...predicate.Announcement) *AnnouncementQuery {
+ _q.predicates = append(_q.predicates, ps...)
+ return _q
+}
+
+// Limit the number of records to be returned by this query.
+func (_q *AnnouncementQuery) Limit(limit int) *AnnouncementQuery {
+ _q.ctx.Limit = &limit
+ return _q
+}
+
+// Offset to start from.
+func (_q *AnnouncementQuery) Offset(offset int) *AnnouncementQuery {
+ _q.ctx.Offset = &offset
+ return _q
+}
+
+// Unique configures the query builder to filter duplicate records on query.
+// By default, unique is set to true, and can be disabled using this method.
+func (_q *AnnouncementQuery) Unique(unique bool) *AnnouncementQuery {
+ _q.ctx.Unique = &unique
+ return _q
+}
+
+// Order specifies how the records should be ordered.
+func (_q *AnnouncementQuery) Order(o ...announcement.OrderOption) *AnnouncementQuery {
+ _q.order = append(_q.order, o...)
+ return _q
+}
+
+// QueryReads chains the current query on the "reads" edge.
+func (_q *AnnouncementQuery) QueryReads() *AnnouncementReadQuery {
+ query := (&AnnouncementReadClient{config: _q.config}).Query()
+ query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
+ if err := _q.prepareQuery(ctx); err != nil {
+ return nil, err
+ }
+ selector := _q.sqlQuery(ctx)
+ if err := selector.Err(); err != nil {
+ return nil, err
+ }
+ step := sqlgraph.NewStep(
+ sqlgraph.From(announcement.Table, announcement.FieldID, selector),
+ sqlgraph.To(announcementread.Table, announcementread.FieldID),
+ sqlgraph.Edge(sqlgraph.O2M, false, announcement.ReadsTable, announcement.ReadsColumn),
+ )
+ fromU = sqlgraph.SetNeighbors(_q.driver.Dialect(), step)
+ return fromU, nil
+ }
+ return query
+}
+
+// First returns the first Announcement entity from the query.
+// Returns a *NotFoundError when no Announcement was found.
+func (_q *AnnouncementQuery) First(ctx context.Context) (*Announcement, error) {
+ nodes, err := _q.Limit(1).All(setContextOp(ctx, _q.ctx, ent.OpQueryFirst))
+ if err != nil {
+ return nil, err
+ }
+ if len(nodes) == 0 {
+ return nil, &NotFoundError{announcement.Label}
+ }
+ return nodes[0], nil
+}
+
+// FirstX is like First, but panics if an error occurs.
+func (_q *AnnouncementQuery) FirstX(ctx context.Context) *Announcement {
+ node, err := _q.First(ctx)
+ if err != nil && !IsNotFound(err) {
+ panic(err)
+ }
+ return node
+}
+
+// FirstID returns the first Announcement ID from the query.
+// Returns a *NotFoundError when no Announcement ID was found.
+func (_q *AnnouncementQuery) FirstID(ctx context.Context) (id int64, err error) {
+ var ids []int64
+ if ids, err = _q.Limit(1).IDs(setContextOp(ctx, _q.ctx, ent.OpQueryFirstID)); err != nil {
+ return
+ }
+ if len(ids) == 0 {
+ err = &NotFoundError{announcement.Label}
+ return
+ }
+ return ids[0], nil
+}
+
+// FirstIDX is like FirstID, but panics if an error occurs.
+func (_q *AnnouncementQuery) FirstIDX(ctx context.Context) int64 {
+ id, err := _q.FirstID(ctx)
+ if err != nil && !IsNotFound(err) {
+ panic(err)
+ }
+ return id
+}
+
+// Only returns a single Announcement entity found by the query, ensuring it only returns one.
+// Returns a *NotSingularError when more than one Announcement entity is found.
+// Returns a *NotFoundError when no Announcement entities are found.
+func (_q *AnnouncementQuery) Only(ctx context.Context) (*Announcement, error) {
+ nodes, err := _q.Limit(2).All(setContextOp(ctx, _q.ctx, ent.OpQueryOnly))
+ if err != nil {
+ return nil, err
+ }
+ switch len(nodes) {
+ case 1:
+ return nodes[0], nil
+ case 0:
+ return nil, &NotFoundError{announcement.Label}
+ default:
+ return nil, &NotSingularError{announcement.Label}
+ }
+}
+
+// OnlyX is like Only, but panics if an error occurs.
+func (_q *AnnouncementQuery) OnlyX(ctx context.Context) *Announcement {
+ node, err := _q.Only(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return node
+}
+
+// OnlyID is like Only, but returns the only Announcement ID in the query.
+// Returns a *NotSingularError when more than one Announcement ID is found.
+// Returns a *NotFoundError when no entities are found.
+func (_q *AnnouncementQuery) OnlyID(ctx context.Context) (id int64, err error) {
+ var ids []int64
+ if ids, err = _q.Limit(2).IDs(setContextOp(ctx, _q.ctx, ent.OpQueryOnlyID)); err != nil {
+ return
+ }
+ switch len(ids) {
+ case 1:
+ id = ids[0]
+ case 0:
+ err = &NotFoundError{announcement.Label}
+ default:
+ err = &NotSingularError{announcement.Label}
+ }
+ return
+}
+
+// OnlyIDX is like OnlyID, but panics if an error occurs.
+func (_q *AnnouncementQuery) OnlyIDX(ctx context.Context) int64 {
+ id, err := _q.OnlyID(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return id
+}
+
+// All executes the query and returns a list of Announcements.
+func (_q *AnnouncementQuery) All(ctx context.Context) ([]*Announcement, error) {
+ ctx = setContextOp(ctx, _q.ctx, ent.OpQueryAll)
+ if err := _q.prepareQuery(ctx); err != nil {
+ return nil, err
+ }
+ qr := querierAll[[]*Announcement, *AnnouncementQuery]()
+ return withInterceptors[[]*Announcement](ctx, _q, qr, _q.inters)
+}
+
+// AllX is like All, but panics if an error occurs.
+func (_q *AnnouncementQuery) AllX(ctx context.Context) []*Announcement {
+ nodes, err := _q.All(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return nodes
+}
+
+// IDs executes the query and returns a list of Announcement IDs.
+func (_q *AnnouncementQuery) IDs(ctx context.Context) (ids []int64, err error) {
+ if _q.ctx.Unique == nil && _q.path != nil {
+ _q.Unique(true)
+ }
+ ctx = setContextOp(ctx, _q.ctx, ent.OpQueryIDs)
+ if err = _q.Select(announcement.FieldID).Scan(ctx, &ids); err != nil {
+ return nil, err
+ }
+ return ids, nil
+}
+
+// IDsX is like IDs, but panics if an error occurs.
+func (_q *AnnouncementQuery) IDsX(ctx context.Context) []int64 {
+ ids, err := _q.IDs(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return ids
+}
+
+// Count returns the count of the given query.
+func (_q *AnnouncementQuery) Count(ctx context.Context) (int, error) {
+ ctx = setContextOp(ctx, _q.ctx, ent.OpQueryCount)
+ if err := _q.prepareQuery(ctx); err != nil {
+ return 0, err
+ }
+ return withInterceptors[int](ctx, _q, querierCount[*AnnouncementQuery](), _q.inters)
+}
+
+// CountX is like Count, but panics if an error occurs.
+func (_q *AnnouncementQuery) CountX(ctx context.Context) int {
+ count, err := _q.Count(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return count
+}
+
+// Exist returns true if the query has elements in the graph.
+func (_q *AnnouncementQuery) Exist(ctx context.Context) (bool, error) {
+ ctx = setContextOp(ctx, _q.ctx, ent.OpQueryExist)
+ switch _, err := _q.FirstID(ctx); {
+ case IsNotFound(err):
+ return false, nil
+ case err != nil:
+ return false, fmt.Errorf("ent: check existence: %w", err)
+ default:
+ return true, nil
+ }
+}
+
+// ExistX is like Exist, but panics if an error occurs.
+func (_q *AnnouncementQuery) ExistX(ctx context.Context) bool {
+ exist, err := _q.Exist(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return exist
+}
+
+// Clone returns a duplicate of the AnnouncementQuery builder, including all associated steps. It can be
+// used to prepare common query builders and use them differently after the clone is made.
+func (_q *AnnouncementQuery) Clone() *AnnouncementQuery {
+ if _q == nil {
+ return nil
+ }
+ return &AnnouncementQuery{
+ config: _q.config,
+ ctx: _q.ctx.Clone(),
+ order: append([]announcement.OrderOption{}, _q.order...),
+ inters: append([]Interceptor{}, _q.inters...),
+ predicates: append([]predicate.Announcement{}, _q.predicates...),
+ withReads: _q.withReads.Clone(),
+ // clone intermediate query.
+ sql: _q.sql.Clone(),
+ path: _q.path,
+ }
+}
+
+// WithReads tells the query-builder to eager-load the nodes that are connected to
+// the "reads" edge. The optional arguments are used to configure the query builder of the edge.
+func (_q *AnnouncementQuery) WithReads(opts ...func(*AnnouncementReadQuery)) *AnnouncementQuery {
+ query := (&AnnouncementReadClient{config: _q.config}).Query()
+ for _, opt := range opts {
+ opt(query)
+ }
+ _q.withReads = query
+ return _q
+}
+
+// GroupBy is used to group vertices by one or more fields/columns.
+// It is often used with aggregate functions, like: count, max, mean, min, sum.
+//
+// Example:
+//
+// var v []struct {
+// Title string `json:"title,omitempty"`
+// Count int `json:"count,omitempty"`
+// }
+//
+// client.Announcement.Query().
+// GroupBy(announcement.FieldTitle).
+// Aggregate(ent.Count()).
+// Scan(ctx, &v)
+func (_q *AnnouncementQuery) GroupBy(field string, fields ...string) *AnnouncementGroupBy {
+ _q.ctx.Fields = append([]string{field}, fields...)
+ grbuild := &AnnouncementGroupBy{build: _q}
+ grbuild.flds = &_q.ctx.Fields
+ grbuild.label = announcement.Label
+ grbuild.scan = grbuild.Scan
+ return grbuild
+}
+
+// Select allows the selection one or more fields/columns for the given query,
+// instead of selecting all fields in the entity.
+//
+// Example:
+//
+// var v []struct {
+// Title string `json:"title,omitempty"`
+// }
+//
+// client.Announcement.Query().
+// Select(announcement.FieldTitle).
+// Scan(ctx, &v)
+func (_q *AnnouncementQuery) Select(fields ...string) *AnnouncementSelect {
+ _q.ctx.Fields = append(_q.ctx.Fields, fields...)
+ sbuild := &AnnouncementSelect{AnnouncementQuery: _q}
+ sbuild.label = announcement.Label
+ sbuild.flds, sbuild.scan = &_q.ctx.Fields, sbuild.Scan
+ return sbuild
+}
+
+// Aggregate returns a AnnouncementSelect configured with the given aggregations.
+func (_q *AnnouncementQuery) Aggregate(fns ...AggregateFunc) *AnnouncementSelect {
+ return _q.Select().Aggregate(fns...)
+}
+
+func (_q *AnnouncementQuery) prepareQuery(ctx context.Context) error {
+ for _, inter := range _q.inters {
+ if inter == nil {
+ return fmt.Errorf("ent: uninitialized interceptor (forgotten import ent/runtime?)")
+ }
+ if trv, ok := inter.(Traverser); ok {
+ if err := trv.Traverse(ctx, _q); err != nil {
+ return err
+ }
+ }
+ }
+ for _, f := range _q.ctx.Fields {
+ if !announcement.ValidColumn(f) {
+ return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
+ }
+ }
+ if _q.path != nil {
+ prev, err := _q.path(ctx)
+ if err != nil {
+ return err
+ }
+ _q.sql = prev
+ }
+ return nil
+}
+
+func (_q *AnnouncementQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Announcement, error) {
+ var (
+ nodes = []*Announcement{}
+ _spec = _q.querySpec()
+ loadedTypes = [1]bool{
+ _q.withReads != nil,
+ }
+ )
+ _spec.ScanValues = func(columns []string) ([]any, error) {
+ return (*Announcement).scanValues(nil, columns)
+ }
+ _spec.Assign = func(columns []string, values []any) error {
+ node := &Announcement{config: _q.config}
+ nodes = append(nodes, node)
+ node.Edges.loadedTypes = loadedTypes
+ return node.assignValues(columns, values)
+ }
+ if len(_q.modifiers) > 0 {
+ _spec.Modifiers = _q.modifiers
+ }
+ for i := range hooks {
+ hooks[i](ctx, _spec)
+ }
+ if err := sqlgraph.QueryNodes(ctx, _q.driver, _spec); err != nil {
+ return nil, err
+ }
+ if len(nodes) == 0 {
+ return nodes, nil
+ }
+ if query := _q.withReads; query != nil {
+ if err := _q.loadReads(ctx, query, nodes,
+ func(n *Announcement) { n.Edges.Reads = []*AnnouncementRead{} },
+ func(n *Announcement, e *AnnouncementRead) { n.Edges.Reads = append(n.Edges.Reads, e) }); err != nil {
+ return nil, err
+ }
+ }
+ return nodes, nil
+}
+
+func (_q *AnnouncementQuery) loadReads(ctx context.Context, query *AnnouncementReadQuery, nodes []*Announcement, init func(*Announcement), assign func(*Announcement, *AnnouncementRead)) error {
+ fks := make([]driver.Value, 0, len(nodes))
+ nodeids := make(map[int64]*Announcement)
+ for i := range nodes {
+ fks = append(fks, nodes[i].ID)
+ nodeids[nodes[i].ID] = nodes[i]
+ if init != nil {
+ init(nodes[i])
+ }
+ }
+ if len(query.ctx.Fields) > 0 {
+ query.ctx.AppendFieldOnce(announcementread.FieldAnnouncementID)
+ }
+ query.Where(predicate.AnnouncementRead(func(s *sql.Selector) {
+ s.Where(sql.InValues(s.C(announcement.ReadsColumn), fks...))
+ }))
+ neighbors, err := query.All(ctx)
+ if err != nil {
+ return err
+ }
+ for _, n := range neighbors {
+ fk := n.AnnouncementID
+ node, ok := nodeids[fk]
+ if !ok {
+ return fmt.Errorf(`unexpected referenced foreign-key "announcement_id" returned %v for node %v`, fk, n.ID)
+ }
+ assign(node, n)
+ }
+ return nil
+}
+
+func (_q *AnnouncementQuery) sqlCount(ctx context.Context) (int, error) {
+ _spec := _q.querySpec()
+ if len(_q.modifiers) > 0 {
+ _spec.Modifiers = _q.modifiers
+ }
+ _spec.Node.Columns = _q.ctx.Fields
+ if len(_q.ctx.Fields) > 0 {
+ _spec.Unique = _q.ctx.Unique != nil && *_q.ctx.Unique
+ }
+ return sqlgraph.CountNodes(ctx, _q.driver, _spec)
+}
+
+func (_q *AnnouncementQuery) querySpec() *sqlgraph.QuerySpec {
+ _spec := sqlgraph.NewQuerySpec(announcement.Table, announcement.Columns, sqlgraph.NewFieldSpec(announcement.FieldID, field.TypeInt64))
+ _spec.From = _q.sql
+ if unique := _q.ctx.Unique; unique != nil {
+ _spec.Unique = *unique
+ } else if _q.path != nil {
+ _spec.Unique = true
+ }
+ if fields := _q.ctx.Fields; len(fields) > 0 {
+ _spec.Node.Columns = make([]string, 0, len(fields))
+ _spec.Node.Columns = append(_spec.Node.Columns, announcement.FieldID)
+ for i := range fields {
+ if fields[i] != announcement.FieldID {
+ _spec.Node.Columns = append(_spec.Node.Columns, fields[i])
+ }
+ }
+ }
+ if ps := _q.predicates; len(ps) > 0 {
+ _spec.Predicate = func(selector *sql.Selector) {
+ for i := range ps {
+ ps[i](selector)
+ }
+ }
+ }
+ if limit := _q.ctx.Limit; limit != nil {
+ _spec.Limit = *limit
+ }
+ if offset := _q.ctx.Offset; offset != nil {
+ _spec.Offset = *offset
+ }
+ if ps := _q.order; len(ps) > 0 {
+ _spec.Order = func(selector *sql.Selector) {
+ for i := range ps {
+ ps[i](selector)
+ }
+ }
+ }
+ return _spec
+}
+
+func (_q *AnnouncementQuery) sqlQuery(ctx context.Context) *sql.Selector {
+ builder := sql.Dialect(_q.driver.Dialect())
+ t1 := builder.Table(announcement.Table)
+ columns := _q.ctx.Fields
+ if len(columns) == 0 {
+ columns = announcement.Columns
+ }
+ selector := builder.Select(t1.Columns(columns...)...).From(t1)
+ if _q.sql != nil {
+ selector = _q.sql
+ selector.Select(selector.Columns(columns...)...)
+ }
+ if _q.ctx.Unique != nil && *_q.ctx.Unique {
+ selector.Distinct()
+ }
+ for _, m := range _q.modifiers {
+ m(selector)
+ }
+ for _, p := range _q.predicates {
+ p(selector)
+ }
+ for _, p := range _q.order {
+ p(selector)
+ }
+ if offset := _q.ctx.Offset; offset != nil {
+ // limit is mandatory for offset clause. We start
+ // with default value, and override it below if needed.
+ selector.Offset(*offset).Limit(math.MaxInt32)
+ }
+ if limit := _q.ctx.Limit; limit != nil {
+ selector.Limit(*limit)
+ }
+ return selector
+}
+
+// ForUpdate locks the selected rows against concurrent updates, and prevent them from being
+// updated, deleted or "selected ... for update" by other sessions, until the transaction is
+// either committed or rolled-back.
+func (_q *AnnouncementQuery) ForUpdate(opts ...sql.LockOption) *AnnouncementQuery {
+ if _q.driver.Dialect() == dialect.Postgres {
+ _q.Unique(false)
+ }
+ _q.modifiers = append(_q.modifiers, func(s *sql.Selector) {
+ s.ForUpdate(opts...)
+ })
+ return _q
+}
+
+// ForShare behaves similarly to ForUpdate, except that it acquires a shared mode lock
+// on any rows that are read. Other sessions can read the rows, but cannot modify them
+// until your transaction commits.
+func (_q *AnnouncementQuery) ForShare(opts ...sql.LockOption) *AnnouncementQuery {
+ if _q.driver.Dialect() == dialect.Postgres {
+ _q.Unique(false)
+ }
+ _q.modifiers = append(_q.modifiers, func(s *sql.Selector) {
+ s.ForShare(opts...)
+ })
+ return _q
+}
+
+// AnnouncementGroupBy is the group-by builder for Announcement entities.
+type AnnouncementGroupBy struct {
+ selector
+ build *AnnouncementQuery
+}
+
+// Aggregate adds the given aggregation functions to the group-by query.
+func (_g *AnnouncementGroupBy) Aggregate(fns ...AggregateFunc) *AnnouncementGroupBy {
+ _g.fns = append(_g.fns, fns...)
+ return _g
+}
+
+// Scan applies the selector query and scans the result into the given value.
+func (_g *AnnouncementGroupBy) Scan(ctx context.Context, v any) error {
+ ctx = setContextOp(ctx, _g.build.ctx, ent.OpQueryGroupBy)
+ if err := _g.build.prepareQuery(ctx); err != nil {
+ return err
+ }
+ return scanWithInterceptors[*AnnouncementQuery, *AnnouncementGroupBy](ctx, _g.build, _g, _g.build.inters, v)
+}
+
+func (_g *AnnouncementGroupBy) sqlScan(ctx context.Context, root *AnnouncementQuery, v any) error {
+ selector := root.sqlQuery(ctx).Select()
+ aggregation := make([]string, 0, len(_g.fns))
+ for _, fn := range _g.fns {
+ aggregation = append(aggregation, fn(selector))
+ }
+ if len(selector.SelectedColumns()) == 0 {
+ columns := make([]string, 0, len(*_g.flds)+len(_g.fns))
+ for _, f := range *_g.flds {
+ columns = append(columns, selector.C(f))
+ }
+ columns = append(columns, aggregation...)
+ selector.Select(columns...)
+ }
+ selector.GroupBy(selector.Columns(*_g.flds...)...)
+ if err := selector.Err(); err != nil {
+ return err
+ }
+ rows := &sql.Rows{}
+ query, args := selector.Query()
+ if err := _g.build.driver.Query(ctx, query, args, rows); err != nil {
+ return err
+ }
+ defer rows.Close()
+ return sql.ScanSlice(rows, v)
+}
+
+// AnnouncementSelect is the builder for selecting fields of Announcement entities.
+type AnnouncementSelect struct {
+ *AnnouncementQuery
+ selector
+}
+
+// Aggregate adds the given aggregation functions to the selector query.
+func (_s *AnnouncementSelect) Aggregate(fns ...AggregateFunc) *AnnouncementSelect {
+ _s.fns = append(_s.fns, fns...)
+ return _s
+}
+
+// Scan applies the selector query and scans the result into the given value.
+func (_s *AnnouncementSelect) Scan(ctx context.Context, v any) error {
+ ctx = setContextOp(ctx, _s.ctx, ent.OpQuerySelect)
+ if err := _s.prepareQuery(ctx); err != nil {
+ return err
+ }
+ return scanWithInterceptors[*AnnouncementQuery, *AnnouncementSelect](ctx, _s.AnnouncementQuery, _s, _s.inters, v)
+}
+
+func (_s *AnnouncementSelect) sqlScan(ctx context.Context, root *AnnouncementQuery, v any) error {
+ selector := root.sqlQuery(ctx)
+ aggregation := make([]string, 0, len(_s.fns))
+ for _, fn := range _s.fns {
+ aggregation = append(aggregation, fn(selector))
+ }
+ switch n := len(*_s.selector.flds); {
+ case n == 0 && len(aggregation) > 0:
+ selector.Select(aggregation...)
+ case n != 0 && len(aggregation) > 0:
+ selector.AppendSelect(aggregation...)
+ }
+ rows := &sql.Rows{}
+ query, args := selector.Query()
+ if err := _s.driver.Query(ctx, query, args, rows); err != nil {
+ return err
+ }
+ defer rows.Close()
+ return sql.ScanSlice(rows, v)
+}
diff --git a/backend/ent/announcement_update.go b/backend/ent/announcement_update.go
new file mode 100644
index 00000000..702d0817
--- /dev/null
+++ b/backend/ent/announcement_update.go
@@ -0,0 +1,824 @@
+// Code generated by ent, DO NOT EDIT.
+
+package ent
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "time"
+
+ "entgo.io/ent/dialect/sql"
+ "entgo.io/ent/dialect/sql/sqlgraph"
+ "entgo.io/ent/schema/field"
+ "github.com/Wei-Shaw/sub2api/ent/announcement"
+ "github.com/Wei-Shaw/sub2api/ent/announcementread"
+ "github.com/Wei-Shaw/sub2api/ent/predicate"
+ "github.com/Wei-Shaw/sub2api/internal/domain"
+)
+
+// AnnouncementUpdate is the builder for updating Announcement entities.
+type AnnouncementUpdate struct {
+ config
+ hooks []Hook
+ mutation *AnnouncementMutation
+}
+
+// Where appends a list predicates to the AnnouncementUpdate builder.
+func (_u *AnnouncementUpdate) Where(ps ...predicate.Announcement) *AnnouncementUpdate {
+ _u.mutation.Where(ps...)
+ return _u
+}
+
+// SetTitle sets the "title" field.
+func (_u *AnnouncementUpdate) SetTitle(v string) *AnnouncementUpdate {
+ _u.mutation.SetTitle(v)
+ return _u
+}
+
+// SetNillableTitle sets the "title" field if the given value is not nil.
+func (_u *AnnouncementUpdate) SetNillableTitle(v *string) *AnnouncementUpdate {
+ if v != nil {
+ _u.SetTitle(*v)
+ }
+ return _u
+}
+
+// SetContent sets the "content" field.
+func (_u *AnnouncementUpdate) SetContent(v string) *AnnouncementUpdate {
+ _u.mutation.SetContent(v)
+ return _u
+}
+
+// SetNillableContent sets the "content" field if the given value is not nil.
+func (_u *AnnouncementUpdate) SetNillableContent(v *string) *AnnouncementUpdate {
+ if v != nil {
+ _u.SetContent(*v)
+ }
+ return _u
+}
+
+// SetStatus sets the "status" field.
+func (_u *AnnouncementUpdate) SetStatus(v string) *AnnouncementUpdate {
+ _u.mutation.SetStatus(v)
+ return _u
+}
+
+// SetNillableStatus sets the "status" field if the given value is not nil.
+func (_u *AnnouncementUpdate) SetNillableStatus(v *string) *AnnouncementUpdate {
+ if v != nil {
+ _u.SetStatus(*v)
+ }
+ return _u
+}
+
+// SetTargeting sets the "targeting" field.
+func (_u *AnnouncementUpdate) SetTargeting(v domain.AnnouncementTargeting) *AnnouncementUpdate {
+ _u.mutation.SetTargeting(v)
+ return _u
+}
+
+// SetNillableTargeting sets the "targeting" field if the given value is not nil.
+func (_u *AnnouncementUpdate) SetNillableTargeting(v *domain.AnnouncementTargeting) *AnnouncementUpdate {
+ if v != nil {
+ _u.SetTargeting(*v)
+ }
+ return _u
+}
+
+// ClearTargeting clears the value of the "targeting" field.
+func (_u *AnnouncementUpdate) ClearTargeting() *AnnouncementUpdate {
+ _u.mutation.ClearTargeting()
+ return _u
+}
+
+// SetStartsAt sets the "starts_at" field.
+func (_u *AnnouncementUpdate) SetStartsAt(v time.Time) *AnnouncementUpdate {
+ _u.mutation.SetStartsAt(v)
+ return _u
+}
+
+// SetNillableStartsAt sets the "starts_at" field if the given value is not nil.
+func (_u *AnnouncementUpdate) SetNillableStartsAt(v *time.Time) *AnnouncementUpdate {
+ if v != nil {
+ _u.SetStartsAt(*v)
+ }
+ return _u
+}
+
+// ClearStartsAt clears the value of the "starts_at" field.
+func (_u *AnnouncementUpdate) ClearStartsAt() *AnnouncementUpdate {
+ _u.mutation.ClearStartsAt()
+ return _u
+}
+
+// SetEndsAt sets the "ends_at" field.
+func (_u *AnnouncementUpdate) SetEndsAt(v time.Time) *AnnouncementUpdate {
+ _u.mutation.SetEndsAt(v)
+ return _u
+}
+
+// SetNillableEndsAt sets the "ends_at" field if the given value is not nil.
+func (_u *AnnouncementUpdate) SetNillableEndsAt(v *time.Time) *AnnouncementUpdate {
+ if v != nil {
+ _u.SetEndsAt(*v)
+ }
+ return _u
+}
+
+// ClearEndsAt clears the value of the "ends_at" field.
+func (_u *AnnouncementUpdate) ClearEndsAt() *AnnouncementUpdate {
+ _u.mutation.ClearEndsAt()
+ return _u
+}
+
+// SetCreatedBy sets the "created_by" field.
+func (_u *AnnouncementUpdate) SetCreatedBy(v int64) *AnnouncementUpdate {
+ _u.mutation.ResetCreatedBy()
+ _u.mutation.SetCreatedBy(v)
+ return _u
+}
+
+// SetNillableCreatedBy sets the "created_by" field if the given value is not nil.
+func (_u *AnnouncementUpdate) SetNillableCreatedBy(v *int64) *AnnouncementUpdate {
+ if v != nil {
+ _u.SetCreatedBy(*v)
+ }
+ return _u
+}
+
+// AddCreatedBy adds value to the "created_by" field.
+func (_u *AnnouncementUpdate) AddCreatedBy(v int64) *AnnouncementUpdate {
+ _u.mutation.AddCreatedBy(v)
+ return _u
+}
+
+// ClearCreatedBy clears the value of the "created_by" field.
+func (_u *AnnouncementUpdate) ClearCreatedBy() *AnnouncementUpdate {
+ _u.mutation.ClearCreatedBy()
+ return _u
+}
+
+// SetUpdatedBy sets the "updated_by" field.
+func (_u *AnnouncementUpdate) SetUpdatedBy(v int64) *AnnouncementUpdate {
+ _u.mutation.ResetUpdatedBy()
+ _u.mutation.SetUpdatedBy(v)
+ return _u
+}
+
+// SetNillableUpdatedBy sets the "updated_by" field if the given value is not nil.
+func (_u *AnnouncementUpdate) SetNillableUpdatedBy(v *int64) *AnnouncementUpdate {
+ if v != nil {
+ _u.SetUpdatedBy(*v)
+ }
+ return _u
+}
+
+// AddUpdatedBy adds value to the "updated_by" field.
+func (_u *AnnouncementUpdate) AddUpdatedBy(v int64) *AnnouncementUpdate {
+ _u.mutation.AddUpdatedBy(v)
+ return _u
+}
+
+// ClearUpdatedBy clears the value of the "updated_by" field.
+func (_u *AnnouncementUpdate) ClearUpdatedBy() *AnnouncementUpdate {
+ _u.mutation.ClearUpdatedBy()
+ return _u
+}
+
+// SetUpdatedAt sets the "updated_at" field.
+func (_u *AnnouncementUpdate) SetUpdatedAt(v time.Time) *AnnouncementUpdate {
+ _u.mutation.SetUpdatedAt(v)
+ return _u
+}
+
+// AddReadIDs adds the "reads" edge to the AnnouncementRead entity by IDs.
+func (_u *AnnouncementUpdate) AddReadIDs(ids ...int64) *AnnouncementUpdate {
+ _u.mutation.AddReadIDs(ids...)
+ return _u
+}
+
+// AddReads adds the "reads" edges to the AnnouncementRead entity.
+func (_u *AnnouncementUpdate) AddReads(v ...*AnnouncementRead) *AnnouncementUpdate {
+ ids := make([]int64, len(v))
+ for i := range v {
+ ids[i] = v[i].ID
+ }
+ return _u.AddReadIDs(ids...)
+}
+
+// Mutation returns the AnnouncementMutation object of the builder.
+func (_u *AnnouncementUpdate) Mutation() *AnnouncementMutation {
+ return _u.mutation
+}
+
+// ClearReads clears all "reads" edges to the AnnouncementRead entity.
+func (_u *AnnouncementUpdate) ClearReads() *AnnouncementUpdate {
+ _u.mutation.ClearReads()
+ return _u
+}
+
+// RemoveReadIDs removes the "reads" edge to AnnouncementRead entities by IDs.
+func (_u *AnnouncementUpdate) RemoveReadIDs(ids ...int64) *AnnouncementUpdate {
+ _u.mutation.RemoveReadIDs(ids...)
+ return _u
+}
+
+// RemoveReads removes "reads" edges to AnnouncementRead entities.
+func (_u *AnnouncementUpdate) RemoveReads(v ...*AnnouncementRead) *AnnouncementUpdate {
+ ids := make([]int64, len(v))
+ for i := range v {
+ ids[i] = v[i].ID
+ }
+ return _u.RemoveReadIDs(ids...)
+}
+
+// Save executes the query and returns the number of nodes affected by the update operation.
+func (_u *AnnouncementUpdate) Save(ctx context.Context) (int, error) {
+ _u.defaults()
+ return withHooks(ctx, _u.sqlSave, _u.mutation, _u.hooks)
+}
+
+// SaveX is like Save, but panics if an error occurs.
+func (_u *AnnouncementUpdate) SaveX(ctx context.Context) int {
+ affected, err := _u.Save(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return affected
+}
+
+// Exec executes the query.
+func (_u *AnnouncementUpdate) Exec(ctx context.Context) error {
+ _, err := _u.Save(ctx)
+ return err
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (_u *AnnouncementUpdate) ExecX(ctx context.Context) {
+ if err := _u.Exec(ctx); err != nil {
+ panic(err)
+ }
+}
+
+// defaults sets the default values of the builder before save.
+func (_u *AnnouncementUpdate) defaults() {
+ if _, ok := _u.mutation.UpdatedAt(); !ok {
+ v := announcement.UpdateDefaultUpdatedAt()
+ _u.mutation.SetUpdatedAt(v)
+ }
+}
+
+// check runs all checks and user-defined validators on the builder.
+func (_u *AnnouncementUpdate) check() error {
+ if v, ok := _u.mutation.Title(); ok {
+ if err := announcement.TitleValidator(v); err != nil {
+ return &ValidationError{Name: "title", err: fmt.Errorf(`ent: validator failed for field "Announcement.title": %w`, err)}
+ }
+ }
+ if v, ok := _u.mutation.Content(); ok {
+ if err := announcement.ContentValidator(v); err != nil {
+ return &ValidationError{Name: "content", err: fmt.Errorf(`ent: validator failed for field "Announcement.content": %w`, err)}
+ }
+ }
+ if v, ok := _u.mutation.Status(); ok {
+ if err := announcement.StatusValidator(v); err != nil {
+ return &ValidationError{Name: "status", err: fmt.Errorf(`ent: validator failed for field "Announcement.status": %w`, err)}
+ }
+ }
+ return nil
+}
+
+func (_u *AnnouncementUpdate) sqlSave(ctx context.Context) (_node int, err error) {
+ if err := _u.check(); err != nil {
+ return _node, err
+ }
+ _spec := sqlgraph.NewUpdateSpec(announcement.Table, announcement.Columns, sqlgraph.NewFieldSpec(announcement.FieldID, field.TypeInt64))
+ if ps := _u.mutation.predicates; len(ps) > 0 {
+ _spec.Predicate = func(selector *sql.Selector) {
+ for i := range ps {
+ ps[i](selector)
+ }
+ }
+ }
+ if value, ok := _u.mutation.Title(); ok {
+ _spec.SetField(announcement.FieldTitle, field.TypeString, value)
+ }
+ if value, ok := _u.mutation.Content(); ok {
+ _spec.SetField(announcement.FieldContent, field.TypeString, value)
+ }
+ if value, ok := _u.mutation.Status(); ok {
+ _spec.SetField(announcement.FieldStatus, field.TypeString, value)
+ }
+ if value, ok := _u.mutation.Targeting(); ok {
+ _spec.SetField(announcement.FieldTargeting, field.TypeJSON, value)
+ }
+ if _u.mutation.TargetingCleared() {
+ _spec.ClearField(announcement.FieldTargeting, field.TypeJSON)
+ }
+ if value, ok := _u.mutation.StartsAt(); ok {
+ _spec.SetField(announcement.FieldStartsAt, field.TypeTime, value)
+ }
+ if _u.mutation.StartsAtCleared() {
+ _spec.ClearField(announcement.FieldStartsAt, field.TypeTime)
+ }
+ if value, ok := _u.mutation.EndsAt(); ok {
+ _spec.SetField(announcement.FieldEndsAt, field.TypeTime, value)
+ }
+ if _u.mutation.EndsAtCleared() {
+ _spec.ClearField(announcement.FieldEndsAt, field.TypeTime)
+ }
+ if value, ok := _u.mutation.CreatedBy(); ok {
+ _spec.SetField(announcement.FieldCreatedBy, field.TypeInt64, value)
+ }
+ if value, ok := _u.mutation.AddedCreatedBy(); ok {
+ _spec.AddField(announcement.FieldCreatedBy, field.TypeInt64, value)
+ }
+ if _u.mutation.CreatedByCleared() {
+ _spec.ClearField(announcement.FieldCreatedBy, field.TypeInt64)
+ }
+ if value, ok := _u.mutation.UpdatedBy(); ok {
+ _spec.SetField(announcement.FieldUpdatedBy, field.TypeInt64, value)
+ }
+ if value, ok := _u.mutation.AddedUpdatedBy(); ok {
+ _spec.AddField(announcement.FieldUpdatedBy, field.TypeInt64, value)
+ }
+ if _u.mutation.UpdatedByCleared() {
+ _spec.ClearField(announcement.FieldUpdatedBy, field.TypeInt64)
+ }
+ if value, ok := _u.mutation.UpdatedAt(); ok {
+ _spec.SetField(announcement.FieldUpdatedAt, field.TypeTime, value)
+ }
+ if _u.mutation.ReadsCleared() {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.O2M,
+ Inverse: false,
+ Table: announcement.ReadsTable,
+ Columns: []string{announcement.ReadsColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64),
+ },
+ }
+ _spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+ }
+ if nodes := _u.mutation.RemovedReadsIDs(); len(nodes) > 0 && !_u.mutation.ReadsCleared() {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.O2M,
+ Inverse: false,
+ Table: announcement.ReadsTable,
+ Columns: []string{announcement.ReadsColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64),
+ },
+ }
+ for _, k := range nodes {
+ edge.Target.Nodes = append(edge.Target.Nodes, k)
+ }
+ _spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+ }
+ if nodes := _u.mutation.ReadsIDs(); len(nodes) > 0 {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.O2M,
+ Inverse: false,
+ Table: announcement.ReadsTable,
+ Columns: []string{announcement.ReadsColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64),
+ },
+ }
+ for _, k := range nodes {
+ edge.Target.Nodes = append(edge.Target.Nodes, k)
+ }
+ _spec.Edges.Add = append(_spec.Edges.Add, edge)
+ }
+ if _node, err = sqlgraph.UpdateNodes(ctx, _u.driver, _spec); err != nil {
+ if _, ok := err.(*sqlgraph.NotFoundError); ok {
+ err = &NotFoundError{announcement.Label}
+ } else if sqlgraph.IsConstraintError(err) {
+ err = &ConstraintError{msg: err.Error(), wrap: err}
+ }
+ return 0, err
+ }
+ _u.mutation.done = true
+ return _node, nil
+}
+
+// AnnouncementUpdateOne is the builder for updating a single Announcement entity.
+type AnnouncementUpdateOne struct {
+ config
+ fields []string
+ hooks []Hook
+ mutation *AnnouncementMutation
+}
+
+// SetTitle sets the "title" field.
+func (_u *AnnouncementUpdateOne) SetTitle(v string) *AnnouncementUpdateOne {
+ _u.mutation.SetTitle(v)
+ return _u
+}
+
+// SetNillableTitle sets the "title" field if the given value is not nil.
+func (_u *AnnouncementUpdateOne) SetNillableTitle(v *string) *AnnouncementUpdateOne {
+ if v != nil {
+ _u.SetTitle(*v)
+ }
+ return _u
+}
+
+// SetContent sets the "content" field.
+func (_u *AnnouncementUpdateOne) SetContent(v string) *AnnouncementUpdateOne {
+ _u.mutation.SetContent(v)
+ return _u
+}
+
+// SetNillableContent sets the "content" field if the given value is not nil.
+func (_u *AnnouncementUpdateOne) SetNillableContent(v *string) *AnnouncementUpdateOne {
+ if v != nil {
+ _u.SetContent(*v)
+ }
+ return _u
+}
+
+// SetStatus sets the "status" field.
+func (_u *AnnouncementUpdateOne) SetStatus(v string) *AnnouncementUpdateOne {
+ _u.mutation.SetStatus(v)
+ return _u
+}
+
+// SetNillableStatus sets the "status" field if the given value is not nil.
+func (_u *AnnouncementUpdateOne) SetNillableStatus(v *string) *AnnouncementUpdateOne {
+ if v != nil {
+ _u.SetStatus(*v)
+ }
+ return _u
+}
+
+// SetTargeting sets the "targeting" field.
+func (_u *AnnouncementUpdateOne) SetTargeting(v domain.AnnouncementTargeting) *AnnouncementUpdateOne {
+ _u.mutation.SetTargeting(v)
+ return _u
+}
+
+// SetNillableTargeting sets the "targeting" field if the given value is not nil.
+func (_u *AnnouncementUpdateOne) SetNillableTargeting(v *domain.AnnouncementTargeting) *AnnouncementUpdateOne {
+ if v != nil {
+ _u.SetTargeting(*v)
+ }
+ return _u
+}
+
+// ClearTargeting clears the value of the "targeting" field.
+func (_u *AnnouncementUpdateOne) ClearTargeting() *AnnouncementUpdateOne {
+ _u.mutation.ClearTargeting()
+ return _u
+}
+
+// SetStartsAt sets the "starts_at" field.
+func (_u *AnnouncementUpdateOne) SetStartsAt(v time.Time) *AnnouncementUpdateOne {
+ _u.mutation.SetStartsAt(v)
+ return _u
+}
+
+// SetNillableStartsAt sets the "starts_at" field if the given value is not nil.
+func (_u *AnnouncementUpdateOne) SetNillableStartsAt(v *time.Time) *AnnouncementUpdateOne {
+ if v != nil {
+ _u.SetStartsAt(*v)
+ }
+ return _u
+}
+
+// ClearStartsAt clears the value of the "starts_at" field.
+func (_u *AnnouncementUpdateOne) ClearStartsAt() *AnnouncementUpdateOne {
+ _u.mutation.ClearStartsAt()
+ return _u
+}
+
+// SetEndsAt sets the "ends_at" field.
+func (_u *AnnouncementUpdateOne) SetEndsAt(v time.Time) *AnnouncementUpdateOne {
+ _u.mutation.SetEndsAt(v)
+ return _u
+}
+
+// SetNillableEndsAt sets the "ends_at" field if the given value is not nil.
+func (_u *AnnouncementUpdateOne) SetNillableEndsAt(v *time.Time) *AnnouncementUpdateOne {
+ if v != nil {
+ _u.SetEndsAt(*v)
+ }
+ return _u
+}
+
+// ClearEndsAt clears the value of the "ends_at" field.
+func (_u *AnnouncementUpdateOne) ClearEndsAt() *AnnouncementUpdateOne {
+ _u.mutation.ClearEndsAt()
+ return _u
+}
+
+// SetCreatedBy sets the "created_by" field.
+func (_u *AnnouncementUpdateOne) SetCreatedBy(v int64) *AnnouncementUpdateOne {
+ _u.mutation.ResetCreatedBy()
+ _u.mutation.SetCreatedBy(v)
+ return _u
+}
+
+// SetNillableCreatedBy sets the "created_by" field if the given value is not nil.
+func (_u *AnnouncementUpdateOne) SetNillableCreatedBy(v *int64) *AnnouncementUpdateOne {
+ if v != nil {
+ _u.SetCreatedBy(*v)
+ }
+ return _u
+}
+
+// AddCreatedBy adds value to the "created_by" field.
+func (_u *AnnouncementUpdateOne) AddCreatedBy(v int64) *AnnouncementUpdateOne {
+ _u.mutation.AddCreatedBy(v)
+ return _u
+}
+
+// ClearCreatedBy clears the value of the "created_by" field.
+func (_u *AnnouncementUpdateOne) ClearCreatedBy() *AnnouncementUpdateOne {
+ _u.mutation.ClearCreatedBy()
+ return _u
+}
+
+// SetUpdatedBy sets the "updated_by" field.
+func (_u *AnnouncementUpdateOne) SetUpdatedBy(v int64) *AnnouncementUpdateOne {
+ _u.mutation.ResetUpdatedBy()
+ _u.mutation.SetUpdatedBy(v)
+ return _u
+}
+
+// SetNillableUpdatedBy sets the "updated_by" field if the given value is not nil.
+func (_u *AnnouncementUpdateOne) SetNillableUpdatedBy(v *int64) *AnnouncementUpdateOne {
+ if v != nil {
+ _u.SetUpdatedBy(*v)
+ }
+ return _u
+}
+
+// AddUpdatedBy adds value to the "updated_by" field.
+func (_u *AnnouncementUpdateOne) AddUpdatedBy(v int64) *AnnouncementUpdateOne {
+ _u.mutation.AddUpdatedBy(v)
+ return _u
+}
+
+// ClearUpdatedBy clears the value of the "updated_by" field.
+func (_u *AnnouncementUpdateOne) ClearUpdatedBy() *AnnouncementUpdateOne {
+ _u.mutation.ClearUpdatedBy()
+ return _u
+}
+
+// SetUpdatedAt sets the "updated_at" field.
+func (_u *AnnouncementUpdateOne) SetUpdatedAt(v time.Time) *AnnouncementUpdateOne {
+ _u.mutation.SetUpdatedAt(v)
+ return _u
+}
+
+// AddReadIDs adds the "reads" edge to the AnnouncementRead entity by IDs.
+func (_u *AnnouncementUpdateOne) AddReadIDs(ids ...int64) *AnnouncementUpdateOne {
+ _u.mutation.AddReadIDs(ids...)
+ return _u
+}
+
+// AddReads adds the "reads" edges to the AnnouncementRead entity.
+func (_u *AnnouncementUpdateOne) AddReads(v ...*AnnouncementRead) *AnnouncementUpdateOne {
+ ids := make([]int64, len(v))
+ for i := range v {
+ ids[i] = v[i].ID
+ }
+ return _u.AddReadIDs(ids...)
+}
+
+// Mutation returns the AnnouncementMutation object of the builder.
+func (_u *AnnouncementUpdateOne) Mutation() *AnnouncementMutation {
+ return _u.mutation
+}
+
+// ClearReads clears all "reads" edges to the AnnouncementRead entity.
+func (_u *AnnouncementUpdateOne) ClearReads() *AnnouncementUpdateOne {
+ _u.mutation.ClearReads()
+ return _u
+}
+
+// RemoveReadIDs removes the "reads" edge to AnnouncementRead entities by IDs.
+func (_u *AnnouncementUpdateOne) RemoveReadIDs(ids ...int64) *AnnouncementUpdateOne {
+ _u.mutation.RemoveReadIDs(ids...)
+ return _u
+}
+
+// RemoveReads removes "reads" edges to AnnouncementRead entities.
+func (_u *AnnouncementUpdateOne) RemoveReads(v ...*AnnouncementRead) *AnnouncementUpdateOne {
+ ids := make([]int64, len(v))
+ for i := range v {
+ ids[i] = v[i].ID
+ }
+ return _u.RemoveReadIDs(ids...)
+}
+
+// Where appends a list predicates to the AnnouncementUpdate builder.
+func (_u *AnnouncementUpdateOne) Where(ps ...predicate.Announcement) *AnnouncementUpdateOne {
+ _u.mutation.Where(ps...)
+ return _u
+}
+
+// Select allows selecting one or more fields (columns) of the returned entity.
+// The default is selecting all fields defined in the entity schema.
+func (_u *AnnouncementUpdateOne) Select(field string, fields ...string) *AnnouncementUpdateOne {
+ _u.fields = append([]string{field}, fields...)
+ return _u
+}
+
+// Save executes the query and returns the updated Announcement entity.
+func (_u *AnnouncementUpdateOne) Save(ctx context.Context) (*Announcement, error) {
+ _u.defaults()
+ return withHooks(ctx, _u.sqlSave, _u.mutation, _u.hooks)
+}
+
+// SaveX is like Save, but panics if an error occurs.
+func (_u *AnnouncementUpdateOne) SaveX(ctx context.Context) *Announcement {
+ node, err := _u.Save(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return node
+}
+
+// Exec executes the query on the entity.
+func (_u *AnnouncementUpdateOne) Exec(ctx context.Context) error {
+ _, err := _u.Save(ctx)
+ return err
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (_u *AnnouncementUpdateOne) ExecX(ctx context.Context) {
+ if err := _u.Exec(ctx); err != nil {
+ panic(err)
+ }
+}
+
+// defaults sets the default values of the builder before save.
+func (_u *AnnouncementUpdateOne) defaults() {
+ if _, ok := _u.mutation.UpdatedAt(); !ok {
+ v := announcement.UpdateDefaultUpdatedAt()
+ _u.mutation.SetUpdatedAt(v)
+ }
+}
+
+// check runs all checks and user-defined validators on the builder.
+func (_u *AnnouncementUpdateOne) check() error {
+ if v, ok := _u.mutation.Title(); ok {
+ if err := announcement.TitleValidator(v); err != nil {
+ return &ValidationError{Name: "title", err: fmt.Errorf(`ent: validator failed for field "Announcement.title": %w`, err)}
+ }
+ }
+ if v, ok := _u.mutation.Content(); ok {
+ if err := announcement.ContentValidator(v); err != nil {
+ return &ValidationError{Name: "content", err: fmt.Errorf(`ent: validator failed for field "Announcement.content": %w`, err)}
+ }
+ }
+ if v, ok := _u.mutation.Status(); ok {
+ if err := announcement.StatusValidator(v); err != nil {
+ return &ValidationError{Name: "status", err: fmt.Errorf(`ent: validator failed for field "Announcement.status": %w`, err)}
+ }
+ }
+ return nil
+}
+
+func (_u *AnnouncementUpdateOne) sqlSave(ctx context.Context) (_node *Announcement, err error) {
+ if err := _u.check(); err != nil {
+ return _node, err
+ }
+ _spec := sqlgraph.NewUpdateSpec(announcement.Table, announcement.Columns, sqlgraph.NewFieldSpec(announcement.FieldID, field.TypeInt64))
+ id, ok := _u.mutation.ID()
+ if !ok {
+ return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "Announcement.id" for update`)}
+ }
+ _spec.Node.ID.Value = id
+ if fields := _u.fields; len(fields) > 0 {
+ _spec.Node.Columns = make([]string, 0, len(fields))
+ _spec.Node.Columns = append(_spec.Node.Columns, announcement.FieldID)
+ for _, f := range fields {
+ if !announcement.ValidColumn(f) {
+ return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
+ }
+ if f != announcement.FieldID {
+ _spec.Node.Columns = append(_spec.Node.Columns, f)
+ }
+ }
+ }
+ if ps := _u.mutation.predicates; len(ps) > 0 {
+ _spec.Predicate = func(selector *sql.Selector) {
+ for i := range ps {
+ ps[i](selector)
+ }
+ }
+ }
+ if value, ok := _u.mutation.Title(); ok {
+ _spec.SetField(announcement.FieldTitle, field.TypeString, value)
+ }
+ if value, ok := _u.mutation.Content(); ok {
+ _spec.SetField(announcement.FieldContent, field.TypeString, value)
+ }
+ if value, ok := _u.mutation.Status(); ok {
+ _spec.SetField(announcement.FieldStatus, field.TypeString, value)
+ }
+ if value, ok := _u.mutation.Targeting(); ok {
+ _spec.SetField(announcement.FieldTargeting, field.TypeJSON, value)
+ }
+ if _u.mutation.TargetingCleared() {
+ _spec.ClearField(announcement.FieldTargeting, field.TypeJSON)
+ }
+ if value, ok := _u.mutation.StartsAt(); ok {
+ _spec.SetField(announcement.FieldStartsAt, field.TypeTime, value)
+ }
+ if _u.mutation.StartsAtCleared() {
+ _spec.ClearField(announcement.FieldStartsAt, field.TypeTime)
+ }
+ if value, ok := _u.mutation.EndsAt(); ok {
+ _spec.SetField(announcement.FieldEndsAt, field.TypeTime, value)
+ }
+ if _u.mutation.EndsAtCleared() {
+ _spec.ClearField(announcement.FieldEndsAt, field.TypeTime)
+ }
+ if value, ok := _u.mutation.CreatedBy(); ok {
+ _spec.SetField(announcement.FieldCreatedBy, field.TypeInt64, value)
+ }
+ if value, ok := _u.mutation.AddedCreatedBy(); ok {
+ _spec.AddField(announcement.FieldCreatedBy, field.TypeInt64, value)
+ }
+ if _u.mutation.CreatedByCleared() {
+ _spec.ClearField(announcement.FieldCreatedBy, field.TypeInt64)
+ }
+ if value, ok := _u.mutation.UpdatedBy(); ok {
+ _spec.SetField(announcement.FieldUpdatedBy, field.TypeInt64, value)
+ }
+ if value, ok := _u.mutation.AddedUpdatedBy(); ok {
+ _spec.AddField(announcement.FieldUpdatedBy, field.TypeInt64, value)
+ }
+ if _u.mutation.UpdatedByCleared() {
+ _spec.ClearField(announcement.FieldUpdatedBy, field.TypeInt64)
+ }
+ if value, ok := _u.mutation.UpdatedAt(); ok {
+ _spec.SetField(announcement.FieldUpdatedAt, field.TypeTime, value)
+ }
+ if _u.mutation.ReadsCleared() {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.O2M,
+ Inverse: false,
+ Table: announcement.ReadsTable,
+ Columns: []string{announcement.ReadsColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64),
+ },
+ }
+ _spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+ }
+ if nodes := _u.mutation.RemovedReadsIDs(); len(nodes) > 0 && !_u.mutation.ReadsCleared() {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.O2M,
+ Inverse: false,
+ Table: announcement.ReadsTable,
+ Columns: []string{announcement.ReadsColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64),
+ },
+ }
+ for _, k := range nodes {
+ edge.Target.Nodes = append(edge.Target.Nodes, k)
+ }
+ _spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+ }
+ if nodes := _u.mutation.ReadsIDs(); len(nodes) > 0 {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.O2M,
+ Inverse: false,
+ Table: announcement.ReadsTable,
+ Columns: []string{announcement.ReadsColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64),
+ },
+ }
+ for _, k := range nodes {
+ edge.Target.Nodes = append(edge.Target.Nodes, k)
+ }
+ _spec.Edges.Add = append(_spec.Edges.Add, edge)
+ }
+ _node = &Announcement{config: _u.config}
+ _spec.Assign = _node.assignValues
+ _spec.ScanValues = _node.scanValues
+ if err = sqlgraph.UpdateNode(ctx, _u.driver, _spec); err != nil {
+ if _, ok := err.(*sqlgraph.NotFoundError); ok {
+ err = &NotFoundError{announcement.Label}
+ } else if sqlgraph.IsConstraintError(err) {
+ err = &ConstraintError{msg: err.Error(), wrap: err}
+ }
+ return nil, err
+ }
+ _u.mutation.done = true
+ return _node, nil
+}
diff --git a/backend/ent/announcementread.go b/backend/ent/announcementread.go
new file mode 100644
index 00000000..7bba04f2
--- /dev/null
+++ b/backend/ent/announcementread.go
@@ -0,0 +1,185 @@
+// Code generated by ent, DO NOT EDIT.
+
+package ent
+
+import (
+ "fmt"
+ "strings"
+ "time"
+
+ "entgo.io/ent"
+ "entgo.io/ent/dialect/sql"
+ "github.com/Wei-Shaw/sub2api/ent/announcement"
+ "github.com/Wei-Shaw/sub2api/ent/announcementread"
+ "github.com/Wei-Shaw/sub2api/ent/user"
+)
+
+// AnnouncementRead is the model entity for the AnnouncementRead schema.
+type AnnouncementRead struct {
+ config `json:"-"`
+ // ID of the ent.
+ ID int64 `json:"id,omitempty"`
+ // AnnouncementID holds the value of the "announcement_id" field.
+ AnnouncementID int64 `json:"announcement_id,omitempty"`
+ // UserID holds the value of the "user_id" field.
+ UserID int64 `json:"user_id,omitempty"`
+ // 用户首次已读时间
+ ReadAt time.Time `json:"read_at,omitempty"`
+ // CreatedAt holds the value of the "created_at" field.
+ CreatedAt time.Time `json:"created_at,omitempty"`
+ // Edges holds the relations/edges for other nodes in the graph.
+ // The values are being populated by the AnnouncementReadQuery when eager-loading is set.
+ Edges AnnouncementReadEdges `json:"edges"`
+ selectValues sql.SelectValues
+}
+
+// AnnouncementReadEdges holds the relations/edges for other nodes in the graph.
+type AnnouncementReadEdges struct {
+ // Announcement holds the value of the announcement edge.
+ Announcement *Announcement `json:"announcement,omitempty"`
+ // User holds the value of the user edge.
+ User *User `json:"user,omitempty"`
+ // loadedTypes holds the information for reporting if a
+ // type was loaded (or requested) in eager-loading or not.
+ loadedTypes [2]bool
+}
+
+// AnnouncementOrErr returns the Announcement value or an error if the edge
+// was not loaded in eager-loading, or loaded but was not found.
+func (e AnnouncementReadEdges) AnnouncementOrErr() (*Announcement, error) {
+ if e.Announcement != nil {
+ return e.Announcement, nil
+ } else if e.loadedTypes[0] {
+ return nil, &NotFoundError{label: announcement.Label}
+ }
+ return nil, &NotLoadedError{edge: "announcement"}
+}
+
+// UserOrErr returns the User value or an error if the edge
+// was not loaded in eager-loading, or loaded but was not found.
+func (e AnnouncementReadEdges) UserOrErr() (*User, error) {
+ if e.User != nil {
+ return e.User, nil
+ } else if e.loadedTypes[1] {
+ return nil, &NotFoundError{label: user.Label}
+ }
+ return nil, &NotLoadedError{edge: "user"}
+}
+
+// scanValues returns the types for scanning values from sql.Rows.
+func (*AnnouncementRead) scanValues(columns []string) ([]any, error) {
+ values := make([]any, len(columns))
+ for i := range columns {
+ switch columns[i] {
+ case announcementread.FieldID, announcementread.FieldAnnouncementID, announcementread.FieldUserID:
+ values[i] = new(sql.NullInt64)
+ case announcementread.FieldReadAt, announcementread.FieldCreatedAt:
+ values[i] = new(sql.NullTime)
+ default:
+ values[i] = new(sql.UnknownType)
+ }
+ }
+ return values, nil
+}
+
+// assignValues assigns the values that were returned from sql.Rows (after scanning)
+// to the AnnouncementRead fields.
+func (_m *AnnouncementRead) assignValues(columns []string, values []any) error {
+ if m, n := len(values), len(columns); m < n {
+ return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
+ }
+ for i := range columns {
+ switch columns[i] {
+ case announcementread.FieldID:
+ value, ok := values[i].(*sql.NullInt64)
+ if !ok {
+ return fmt.Errorf("unexpected type %T for field id", value)
+ }
+ _m.ID = int64(value.Int64)
+ case announcementread.FieldAnnouncementID:
+ if value, ok := values[i].(*sql.NullInt64); !ok {
+ return fmt.Errorf("unexpected type %T for field announcement_id", values[i])
+ } else if value.Valid {
+ _m.AnnouncementID = value.Int64
+ }
+ case announcementread.FieldUserID:
+ if value, ok := values[i].(*sql.NullInt64); !ok {
+ return fmt.Errorf("unexpected type %T for field user_id", values[i])
+ } else if value.Valid {
+ _m.UserID = value.Int64
+ }
+ case announcementread.FieldReadAt:
+ if value, ok := values[i].(*sql.NullTime); !ok {
+ return fmt.Errorf("unexpected type %T for field read_at", values[i])
+ } else if value.Valid {
+ _m.ReadAt = value.Time
+ }
+ case announcementread.FieldCreatedAt:
+ if value, ok := values[i].(*sql.NullTime); !ok {
+ return fmt.Errorf("unexpected type %T for field created_at", values[i])
+ } else if value.Valid {
+ _m.CreatedAt = value.Time
+ }
+ default:
+ _m.selectValues.Set(columns[i], values[i])
+ }
+ }
+ return nil
+}
+
+// Value returns the ent.Value that was dynamically selected and assigned to the AnnouncementRead.
+// This includes values selected through modifiers, order, etc.
+func (_m *AnnouncementRead) Value(name string) (ent.Value, error) {
+ return _m.selectValues.Get(name)
+}
+
+// QueryAnnouncement queries the "announcement" edge of the AnnouncementRead entity.
+func (_m *AnnouncementRead) QueryAnnouncement() *AnnouncementQuery {
+ return NewAnnouncementReadClient(_m.config).QueryAnnouncement(_m)
+}
+
+// QueryUser queries the "user" edge of the AnnouncementRead entity.
+func (_m *AnnouncementRead) QueryUser() *UserQuery {
+ return NewAnnouncementReadClient(_m.config).QueryUser(_m)
+}
+
+// Update returns a builder for updating this AnnouncementRead.
+// Note that you need to call AnnouncementRead.Unwrap() before calling this method if this AnnouncementRead
+// was returned from a transaction, and the transaction was committed or rolled back.
+func (_m *AnnouncementRead) Update() *AnnouncementReadUpdateOne {
+ return NewAnnouncementReadClient(_m.config).UpdateOne(_m)
+}
+
+// Unwrap unwraps the AnnouncementRead entity that was returned from a transaction after it was closed,
+// so that all future queries will be executed through the driver which created the transaction.
+func (_m *AnnouncementRead) Unwrap() *AnnouncementRead {
+ _tx, ok := _m.config.driver.(*txDriver)
+ if !ok {
+ panic("ent: AnnouncementRead is not a transactional entity")
+ }
+ _m.config.driver = _tx.drv
+ return _m
+}
+
+// String implements the fmt.Stringer.
+func (_m *AnnouncementRead) String() string {
+ var builder strings.Builder
+ builder.WriteString("AnnouncementRead(")
+ builder.WriteString(fmt.Sprintf("id=%v, ", _m.ID))
+ builder.WriteString("announcement_id=")
+ builder.WriteString(fmt.Sprintf("%v", _m.AnnouncementID))
+ builder.WriteString(", ")
+ builder.WriteString("user_id=")
+ builder.WriteString(fmt.Sprintf("%v", _m.UserID))
+ builder.WriteString(", ")
+ builder.WriteString("read_at=")
+ builder.WriteString(_m.ReadAt.Format(time.ANSIC))
+ builder.WriteString(", ")
+ builder.WriteString("created_at=")
+ builder.WriteString(_m.CreatedAt.Format(time.ANSIC))
+ builder.WriteByte(')')
+ return builder.String()
+}
+
+// AnnouncementReads is a parsable slice of AnnouncementRead.
+type AnnouncementReads []*AnnouncementRead
diff --git a/backend/ent/announcementread/announcementread.go b/backend/ent/announcementread/announcementread.go
new file mode 100644
index 00000000..cf5fe458
--- /dev/null
+++ b/backend/ent/announcementread/announcementread.go
@@ -0,0 +1,127 @@
+// Code generated by ent, DO NOT EDIT.
+
+package announcementread
+
+import (
+ "time"
+
+ "entgo.io/ent/dialect/sql"
+ "entgo.io/ent/dialect/sql/sqlgraph"
+)
+
+const (
+ // Label holds the string label denoting the announcementread type in the database.
+ Label = "announcement_read"
+ // FieldID holds the string denoting the id field in the database.
+ FieldID = "id"
+ // FieldAnnouncementID holds the string denoting the announcement_id field in the database.
+ FieldAnnouncementID = "announcement_id"
+ // FieldUserID holds the string denoting the user_id field in the database.
+ FieldUserID = "user_id"
+ // FieldReadAt holds the string denoting the read_at field in the database.
+ FieldReadAt = "read_at"
+ // FieldCreatedAt holds the string denoting the created_at field in the database.
+ FieldCreatedAt = "created_at"
+ // EdgeAnnouncement holds the string denoting the announcement edge name in mutations.
+ EdgeAnnouncement = "announcement"
+ // EdgeUser holds the string denoting the user edge name in mutations.
+ EdgeUser = "user"
+ // Table holds the table name of the announcementread in the database.
+ Table = "announcement_reads"
+ // AnnouncementTable is the table that holds the announcement relation/edge.
+ AnnouncementTable = "announcement_reads"
+ // AnnouncementInverseTable is the table name for the Announcement entity.
+ // It exists in this package in order to avoid circular dependency with the "announcement" package.
+ AnnouncementInverseTable = "announcements"
+ // AnnouncementColumn is the table column denoting the announcement relation/edge.
+ AnnouncementColumn = "announcement_id"
+ // UserTable is the table that holds the user relation/edge.
+ UserTable = "announcement_reads"
+ // UserInverseTable is the table name for the User entity.
+ // It exists in this package in order to avoid circular dependency with the "user" package.
+ UserInverseTable = "users"
+ // UserColumn is the table column denoting the user relation/edge.
+ UserColumn = "user_id"
+)
+
+// Columns holds all SQL columns for announcementread fields.
+var Columns = []string{
+ FieldID,
+ FieldAnnouncementID,
+ FieldUserID,
+ FieldReadAt,
+ FieldCreatedAt,
+}
+
+// ValidColumn reports if the column name is valid (part of the table columns).
+func ValidColumn(column string) bool {
+ for i := range Columns {
+ if column == Columns[i] {
+ return true
+ }
+ }
+ return false
+}
+
+var (
+ // DefaultReadAt holds the default value on creation for the "read_at" field.
+ DefaultReadAt func() time.Time
+ // DefaultCreatedAt holds the default value on creation for the "created_at" field.
+ DefaultCreatedAt func() time.Time
+)
+
+// OrderOption defines the ordering options for the AnnouncementRead queries.
+type OrderOption func(*sql.Selector)
+
+// ByID orders the results by the id field.
+func ByID(opts ...sql.OrderTermOption) OrderOption {
+ return sql.OrderByField(FieldID, opts...).ToFunc()
+}
+
+// ByAnnouncementID orders the results by the announcement_id field.
+func ByAnnouncementID(opts ...sql.OrderTermOption) OrderOption {
+ return sql.OrderByField(FieldAnnouncementID, opts...).ToFunc()
+}
+
+// ByUserID orders the results by the user_id field.
+func ByUserID(opts ...sql.OrderTermOption) OrderOption {
+ return sql.OrderByField(FieldUserID, opts...).ToFunc()
+}
+
+// ByReadAt orders the results by the read_at field.
+func ByReadAt(opts ...sql.OrderTermOption) OrderOption {
+ return sql.OrderByField(FieldReadAt, opts...).ToFunc()
+}
+
+// ByCreatedAt orders the results by the created_at field.
+func ByCreatedAt(opts ...sql.OrderTermOption) OrderOption {
+ return sql.OrderByField(FieldCreatedAt, opts...).ToFunc()
+}
+
+// ByAnnouncementField orders the results by announcement field.
+func ByAnnouncementField(field string, opts ...sql.OrderTermOption) OrderOption {
+ return func(s *sql.Selector) {
+ sqlgraph.OrderByNeighborTerms(s, newAnnouncementStep(), sql.OrderByField(field, opts...))
+ }
+}
+
+// ByUserField orders the results by user field.
+func ByUserField(field string, opts ...sql.OrderTermOption) OrderOption {
+ return func(s *sql.Selector) {
+ sqlgraph.OrderByNeighborTerms(s, newUserStep(), sql.OrderByField(field, opts...))
+ }
+}
+func newAnnouncementStep() *sqlgraph.Step {
+ return sqlgraph.NewStep(
+ sqlgraph.From(Table, FieldID),
+ sqlgraph.To(AnnouncementInverseTable, FieldID),
+ sqlgraph.Edge(sqlgraph.M2O, true, AnnouncementTable, AnnouncementColumn),
+ )
+}
+func newUserStep() *sqlgraph.Step {
+ return sqlgraph.NewStep(
+ sqlgraph.From(Table, FieldID),
+ sqlgraph.To(UserInverseTable, FieldID),
+ sqlgraph.Edge(sqlgraph.M2O, true, UserTable, UserColumn),
+ )
+}
diff --git a/backend/ent/announcementread/where.go b/backend/ent/announcementread/where.go
new file mode 100644
index 00000000..1a4305e8
--- /dev/null
+++ b/backend/ent/announcementread/where.go
@@ -0,0 +1,257 @@
+// Code generated by ent, DO NOT EDIT.
+
+package announcementread
+
+import (
+ "time"
+
+ "entgo.io/ent/dialect/sql"
+ "entgo.io/ent/dialect/sql/sqlgraph"
+ "github.com/Wei-Shaw/sub2api/ent/predicate"
+)
+
+// ID filters vertices based on their ID field.
+func ID(id int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldEQ(FieldID, id))
+}
+
+// IDEQ applies the EQ predicate on the ID field.
+func IDEQ(id int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldEQ(FieldID, id))
+}
+
+// IDNEQ applies the NEQ predicate on the ID field.
+func IDNEQ(id int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldNEQ(FieldID, id))
+}
+
+// IDIn applies the In predicate on the ID field.
+func IDIn(ids ...int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldIn(FieldID, ids...))
+}
+
+// IDNotIn applies the NotIn predicate on the ID field.
+func IDNotIn(ids ...int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldNotIn(FieldID, ids...))
+}
+
+// IDGT applies the GT predicate on the ID field.
+func IDGT(id int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldGT(FieldID, id))
+}
+
+// IDGTE applies the GTE predicate on the ID field.
+func IDGTE(id int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldGTE(FieldID, id))
+}
+
+// IDLT applies the LT predicate on the ID field.
+func IDLT(id int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldLT(FieldID, id))
+}
+
+// IDLTE applies the LTE predicate on the ID field.
+func IDLTE(id int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldLTE(FieldID, id))
+}
+
+// AnnouncementID applies equality check predicate on the "announcement_id" field. It's identical to AnnouncementIDEQ.
+func AnnouncementID(v int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldEQ(FieldAnnouncementID, v))
+}
+
+// UserID applies equality check predicate on the "user_id" field. It's identical to UserIDEQ.
+func UserID(v int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldEQ(FieldUserID, v))
+}
+
+// ReadAt applies equality check predicate on the "read_at" field. It's identical to ReadAtEQ.
+func ReadAt(v time.Time) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldEQ(FieldReadAt, v))
+}
+
+// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ.
+func CreatedAt(v time.Time) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldEQ(FieldCreatedAt, v))
+}
+
+// AnnouncementIDEQ applies the EQ predicate on the "announcement_id" field.
+func AnnouncementIDEQ(v int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldEQ(FieldAnnouncementID, v))
+}
+
+// AnnouncementIDNEQ applies the NEQ predicate on the "announcement_id" field.
+func AnnouncementIDNEQ(v int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldNEQ(FieldAnnouncementID, v))
+}
+
+// AnnouncementIDIn applies the In predicate on the "announcement_id" field.
+func AnnouncementIDIn(vs ...int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldIn(FieldAnnouncementID, vs...))
+}
+
+// AnnouncementIDNotIn applies the NotIn predicate on the "announcement_id" field.
+func AnnouncementIDNotIn(vs ...int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldNotIn(FieldAnnouncementID, vs...))
+}
+
+// UserIDEQ applies the EQ predicate on the "user_id" field.
+func UserIDEQ(v int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldEQ(FieldUserID, v))
+}
+
+// UserIDNEQ applies the NEQ predicate on the "user_id" field.
+func UserIDNEQ(v int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldNEQ(FieldUserID, v))
+}
+
+// UserIDIn applies the In predicate on the "user_id" field.
+func UserIDIn(vs ...int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldIn(FieldUserID, vs...))
+}
+
+// UserIDNotIn applies the NotIn predicate on the "user_id" field.
+func UserIDNotIn(vs ...int64) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldNotIn(FieldUserID, vs...))
+}
+
+// ReadAtEQ applies the EQ predicate on the "read_at" field.
+func ReadAtEQ(v time.Time) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldEQ(FieldReadAt, v))
+}
+
+// ReadAtNEQ applies the NEQ predicate on the "read_at" field.
+func ReadAtNEQ(v time.Time) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldNEQ(FieldReadAt, v))
+}
+
+// ReadAtIn applies the In predicate on the "read_at" field.
+func ReadAtIn(vs ...time.Time) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldIn(FieldReadAt, vs...))
+}
+
+// ReadAtNotIn applies the NotIn predicate on the "read_at" field.
+func ReadAtNotIn(vs ...time.Time) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldNotIn(FieldReadAt, vs...))
+}
+
+// ReadAtGT applies the GT predicate on the "read_at" field.
+func ReadAtGT(v time.Time) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldGT(FieldReadAt, v))
+}
+
+// ReadAtGTE applies the GTE predicate on the "read_at" field.
+func ReadAtGTE(v time.Time) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldGTE(FieldReadAt, v))
+}
+
+// ReadAtLT applies the LT predicate on the "read_at" field.
+func ReadAtLT(v time.Time) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldLT(FieldReadAt, v))
+}
+
+// ReadAtLTE applies the LTE predicate on the "read_at" field.
+func ReadAtLTE(v time.Time) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldLTE(FieldReadAt, v))
+}
+
+// CreatedAtEQ applies the EQ predicate on the "created_at" field.
+func CreatedAtEQ(v time.Time) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldEQ(FieldCreatedAt, v))
+}
+
+// CreatedAtNEQ applies the NEQ predicate on the "created_at" field.
+func CreatedAtNEQ(v time.Time) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldNEQ(FieldCreatedAt, v))
+}
+
+// CreatedAtIn applies the In predicate on the "created_at" field.
+func CreatedAtIn(vs ...time.Time) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldIn(FieldCreatedAt, vs...))
+}
+
+// CreatedAtNotIn applies the NotIn predicate on the "created_at" field.
+func CreatedAtNotIn(vs ...time.Time) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldNotIn(FieldCreatedAt, vs...))
+}
+
+// CreatedAtGT applies the GT predicate on the "created_at" field.
+func CreatedAtGT(v time.Time) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldGT(FieldCreatedAt, v))
+}
+
+// CreatedAtGTE applies the GTE predicate on the "created_at" field.
+func CreatedAtGTE(v time.Time) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldGTE(FieldCreatedAt, v))
+}
+
+// CreatedAtLT applies the LT predicate on the "created_at" field.
+func CreatedAtLT(v time.Time) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldLT(FieldCreatedAt, v))
+}
+
+// CreatedAtLTE applies the LTE predicate on the "created_at" field.
+func CreatedAtLTE(v time.Time) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.FieldLTE(FieldCreatedAt, v))
+}
+
+// HasAnnouncement applies the HasEdge predicate on the "announcement" edge.
+func HasAnnouncement() predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(func(s *sql.Selector) {
+ step := sqlgraph.NewStep(
+ sqlgraph.From(Table, FieldID),
+ sqlgraph.Edge(sqlgraph.M2O, true, AnnouncementTable, AnnouncementColumn),
+ )
+ sqlgraph.HasNeighbors(s, step)
+ })
+}
+
+// HasAnnouncementWith applies the HasEdge predicate on the "announcement" edge with a given conditions (other predicates).
+func HasAnnouncementWith(preds ...predicate.Announcement) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(func(s *sql.Selector) {
+ step := newAnnouncementStep()
+ sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
+ for _, p := range preds {
+ p(s)
+ }
+ })
+ })
+}
+
+// HasUser applies the HasEdge predicate on the "user" edge.
+func HasUser() predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(func(s *sql.Selector) {
+ step := sqlgraph.NewStep(
+ sqlgraph.From(Table, FieldID),
+ sqlgraph.Edge(sqlgraph.M2O, true, UserTable, UserColumn),
+ )
+ sqlgraph.HasNeighbors(s, step)
+ })
+}
+
+// HasUserWith applies the HasEdge predicate on the "user" edge with a given conditions (other predicates).
+func HasUserWith(preds ...predicate.User) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(func(s *sql.Selector) {
+ step := newUserStep()
+ sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
+ for _, p := range preds {
+ p(s)
+ }
+ })
+ })
+}
+
+// And groups predicates with the AND operator between them.
+func And(predicates ...predicate.AnnouncementRead) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.AndPredicates(predicates...))
+}
+
+// Or groups predicates with the OR operator between them.
+func Or(predicates ...predicate.AnnouncementRead) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.OrPredicates(predicates...))
+}
+
+// Not applies the not operator on the given predicate.
+func Not(p predicate.AnnouncementRead) predicate.AnnouncementRead {
+ return predicate.AnnouncementRead(sql.NotPredicates(p))
+}
diff --git a/backend/ent/announcementread_create.go b/backend/ent/announcementread_create.go
new file mode 100644
index 00000000..c8c211ff
--- /dev/null
+++ b/backend/ent/announcementread_create.go
@@ -0,0 +1,660 @@
+// Code generated by ent, DO NOT EDIT.
+
+package ent
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "time"
+
+ "entgo.io/ent/dialect/sql"
+ "entgo.io/ent/dialect/sql/sqlgraph"
+ "entgo.io/ent/schema/field"
+ "github.com/Wei-Shaw/sub2api/ent/announcement"
+ "github.com/Wei-Shaw/sub2api/ent/announcementread"
+ "github.com/Wei-Shaw/sub2api/ent/user"
+)
+
+// AnnouncementReadCreate is the builder for creating a AnnouncementRead entity.
+type AnnouncementReadCreate struct {
+ config
+ mutation *AnnouncementReadMutation
+ hooks []Hook
+ conflict []sql.ConflictOption
+}
+
+// SetAnnouncementID sets the "announcement_id" field.
+func (_c *AnnouncementReadCreate) SetAnnouncementID(v int64) *AnnouncementReadCreate {
+ _c.mutation.SetAnnouncementID(v)
+ return _c
+}
+
+// SetUserID sets the "user_id" field.
+func (_c *AnnouncementReadCreate) SetUserID(v int64) *AnnouncementReadCreate {
+ _c.mutation.SetUserID(v)
+ return _c
+}
+
+// SetReadAt sets the "read_at" field.
+func (_c *AnnouncementReadCreate) SetReadAt(v time.Time) *AnnouncementReadCreate {
+ _c.mutation.SetReadAt(v)
+ return _c
+}
+
+// SetNillableReadAt sets the "read_at" field if the given value is not nil.
+func (_c *AnnouncementReadCreate) SetNillableReadAt(v *time.Time) *AnnouncementReadCreate {
+ if v != nil {
+ _c.SetReadAt(*v)
+ }
+ return _c
+}
+
+// SetCreatedAt sets the "created_at" field.
+func (_c *AnnouncementReadCreate) SetCreatedAt(v time.Time) *AnnouncementReadCreate {
+ _c.mutation.SetCreatedAt(v)
+ return _c
+}
+
+// SetNillableCreatedAt sets the "created_at" field if the given value is not nil.
+func (_c *AnnouncementReadCreate) SetNillableCreatedAt(v *time.Time) *AnnouncementReadCreate {
+ if v != nil {
+ _c.SetCreatedAt(*v)
+ }
+ return _c
+}
+
+// SetAnnouncement sets the "announcement" edge to the Announcement entity.
+func (_c *AnnouncementReadCreate) SetAnnouncement(v *Announcement) *AnnouncementReadCreate {
+ return _c.SetAnnouncementID(v.ID)
+}
+
+// SetUser sets the "user" edge to the User entity.
+func (_c *AnnouncementReadCreate) SetUser(v *User) *AnnouncementReadCreate {
+ return _c.SetUserID(v.ID)
+}
+
+// Mutation returns the AnnouncementReadMutation object of the builder.
+func (_c *AnnouncementReadCreate) Mutation() *AnnouncementReadMutation {
+ return _c.mutation
+}
+
+// Save creates the AnnouncementRead in the database.
+func (_c *AnnouncementReadCreate) Save(ctx context.Context) (*AnnouncementRead, error) {
+ _c.defaults()
+ return withHooks(ctx, _c.sqlSave, _c.mutation, _c.hooks)
+}
+
+// SaveX calls Save and panics if Save returns an error.
+func (_c *AnnouncementReadCreate) SaveX(ctx context.Context) *AnnouncementRead {
+ v, err := _c.Save(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return v
+}
+
+// Exec executes the query.
+func (_c *AnnouncementReadCreate) Exec(ctx context.Context) error {
+ _, err := _c.Save(ctx)
+ return err
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (_c *AnnouncementReadCreate) ExecX(ctx context.Context) {
+ if err := _c.Exec(ctx); err != nil {
+ panic(err)
+ }
+}
+
+// defaults sets the default values of the builder before save.
+func (_c *AnnouncementReadCreate) defaults() {
+ if _, ok := _c.mutation.ReadAt(); !ok {
+ v := announcementread.DefaultReadAt()
+ _c.mutation.SetReadAt(v)
+ }
+ if _, ok := _c.mutation.CreatedAt(); !ok {
+ v := announcementread.DefaultCreatedAt()
+ _c.mutation.SetCreatedAt(v)
+ }
+}
+
+// check runs all checks and user-defined validators on the builder.
+func (_c *AnnouncementReadCreate) check() error {
+ if _, ok := _c.mutation.AnnouncementID(); !ok {
+ return &ValidationError{Name: "announcement_id", err: errors.New(`ent: missing required field "AnnouncementRead.announcement_id"`)}
+ }
+ if _, ok := _c.mutation.UserID(); !ok {
+ return &ValidationError{Name: "user_id", err: errors.New(`ent: missing required field "AnnouncementRead.user_id"`)}
+ }
+ if _, ok := _c.mutation.ReadAt(); !ok {
+ return &ValidationError{Name: "read_at", err: errors.New(`ent: missing required field "AnnouncementRead.read_at"`)}
+ }
+ if _, ok := _c.mutation.CreatedAt(); !ok {
+ return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "AnnouncementRead.created_at"`)}
+ }
+ if len(_c.mutation.AnnouncementIDs()) == 0 {
+ return &ValidationError{Name: "announcement", err: errors.New(`ent: missing required edge "AnnouncementRead.announcement"`)}
+ }
+ if len(_c.mutation.UserIDs()) == 0 {
+ return &ValidationError{Name: "user", err: errors.New(`ent: missing required edge "AnnouncementRead.user"`)}
+ }
+ return nil
+}
+
+func (_c *AnnouncementReadCreate) sqlSave(ctx context.Context) (*AnnouncementRead, error) {
+ if err := _c.check(); err != nil {
+ return nil, err
+ }
+ _node, _spec := _c.createSpec()
+ if err := sqlgraph.CreateNode(ctx, _c.driver, _spec); err != nil {
+ if sqlgraph.IsConstraintError(err) {
+ err = &ConstraintError{msg: err.Error(), wrap: err}
+ }
+ return nil, err
+ }
+ id := _spec.ID.Value.(int64)
+ _node.ID = int64(id)
+ _c.mutation.id = &_node.ID
+ _c.mutation.done = true
+ return _node, nil
+}
+
+func (_c *AnnouncementReadCreate) createSpec() (*AnnouncementRead, *sqlgraph.CreateSpec) {
+ var (
+ _node = &AnnouncementRead{config: _c.config}
+ _spec = sqlgraph.NewCreateSpec(announcementread.Table, sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64))
+ )
+ _spec.OnConflict = _c.conflict
+ if value, ok := _c.mutation.ReadAt(); ok {
+ _spec.SetField(announcementread.FieldReadAt, field.TypeTime, value)
+ _node.ReadAt = value
+ }
+ if value, ok := _c.mutation.CreatedAt(); ok {
+ _spec.SetField(announcementread.FieldCreatedAt, field.TypeTime, value)
+ _node.CreatedAt = value
+ }
+ if nodes := _c.mutation.AnnouncementIDs(); len(nodes) > 0 {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.M2O,
+ Inverse: true,
+ Table: announcementread.AnnouncementTable,
+ Columns: []string{announcementread.AnnouncementColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcement.FieldID, field.TypeInt64),
+ },
+ }
+ for _, k := range nodes {
+ edge.Target.Nodes = append(edge.Target.Nodes, k)
+ }
+ _node.AnnouncementID = nodes[0]
+ _spec.Edges = append(_spec.Edges, edge)
+ }
+ if nodes := _c.mutation.UserIDs(); len(nodes) > 0 {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.M2O,
+ Inverse: true,
+ Table: announcementread.UserTable,
+ Columns: []string{announcementread.UserColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeInt64),
+ },
+ }
+ for _, k := range nodes {
+ edge.Target.Nodes = append(edge.Target.Nodes, k)
+ }
+ _node.UserID = nodes[0]
+ _spec.Edges = append(_spec.Edges, edge)
+ }
+ return _node, _spec
+}
+
+// OnConflict allows configuring the `ON CONFLICT` / `ON DUPLICATE KEY` clause
+// of the `INSERT` statement. For example:
+//
+// client.AnnouncementRead.Create().
+// SetAnnouncementID(v).
+// OnConflict(
+// // Update the row with the new values
+//	// that was proposed for insertion.
+// sql.ResolveWithNewValues(),
+// ).
+// // Override some of the fields with custom
+// // update values.
+// Update(func(u *ent.AnnouncementReadUpsert) {
+// SetAnnouncementID(v+v).
+// }).
+// Exec(ctx)
+func (_c *AnnouncementReadCreate) OnConflict(opts ...sql.ConflictOption) *AnnouncementReadUpsertOne {
+ _c.conflict = opts
+ return &AnnouncementReadUpsertOne{
+ create: _c,
+ }
+}
+
+// OnConflictColumns calls `OnConflict` and configures the columns
+// as conflict target. Using this option is equivalent to using:
+//
+// client.AnnouncementRead.Create().
+// OnConflict(sql.ConflictColumns(columns...)).
+// Exec(ctx)
+func (_c *AnnouncementReadCreate) OnConflictColumns(columns ...string) *AnnouncementReadUpsertOne {
+ _c.conflict = append(_c.conflict, sql.ConflictColumns(columns...))
+ return &AnnouncementReadUpsertOne{
+ create: _c,
+ }
+}
+
+type (
+ // AnnouncementReadUpsertOne is the builder for "upsert"-ing
+ // one AnnouncementRead node.
+ AnnouncementReadUpsertOne struct {
+ create *AnnouncementReadCreate
+ }
+
+ // AnnouncementReadUpsert is the "OnConflict" setter.
+ AnnouncementReadUpsert struct {
+ *sql.UpdateSet
+ }
+)
+
+// SetAnnouncementID sets the "announcement_id" field.
+func (u *AnnouncementReadUpsert) SetAnnouncementID(v int64) *AnnouncementReadUpsert {
+ u.Set(announcementread.FieldAnnouncementID, v)
+ return u
+}
+
+// UpdateAnnouncementID sets the "announcement_id" field to the value that was provided on create.
+func (u *AnnouncementReadUpsert) UpdateAnnouncementID() *AnnouncementReadUpsert {
+ u.SetExcluded(announcementread.FieldAnnouncementID)
+ return u
+}
+
+// SetUserID sets the "user_id" field.
+func (u *AnnouncementReadUpsert) SetUserID(v int64) *AnnouncementReadUpsert {
+ u.Set(announcementread.FieldUserID, v)
+ return u
+}
+
+// UpdateUserID sets the "user_id" field to the value that was provided on create.
+func (u *AnnouncementReadUpsert) UpdateUserID() *AnnouncementReadUpsert {
+ u.SetExcluded(announcementread.FieldUserID)
+ return u
+}
+
+// SetReadAt sets the "read_at" field.
+func (u *AnnouncementReadUpsert) SetReadAt(v time.Time) *AnnouncementReadUpsert {
+ u.Set(announcementread.FieldReadAt, v)
+ return u
+}
+
+// UpdateReadAt sets the "read_at" field to the value that was provided on create.
+func (u *AnnouncementReadUpsert) UpdateReadAt() *AnnouncementReadUpsert {
+ u.SetExcluded(announcementread.FieldReadAt)
+ return u
+}
+
+// UpdateNewValues updates the mutable fields using the new values that were set on create.
+// Using this option is equivalent to using:
+//
+// client.AnnouncementRead.Create().
+// OnConflict(
+// sql.ResolveWithNewValues(),
+// ).
+// Exec(ctx)
+func (u *AnnouncementReadUpsertOne) UpdateNewValues() *AnnouncementReadUpsertOne {
+ u.create.conflict = append(u.create.conflict, sql.ResolveWithNewValues())
+ u.create.conflict = append(u.create.conflict, sql.ResolveWith(func(s *sql.UpdateSet) {
+ if _, exists := u.create.mutation.CreatedAt(); exists {
+ s.SetIgnore(announcementread.FieldCreatedAt)
+ }
+ }))
+ return u
+}
+
+// Ignore sets each column to itself in case of conflict.
+// Using this option is equivalent to using:
+//
+// client.AnnouncementRead.Create().
+// OnConflict(sql.ResolveWithIgnore()).
+// Exec(ctx)
+func (u *AnnouncementReadUpsertOne) Ignore() *AnnouncementReadUpsertOne {
+ u.create.conflict = append(u.create.conflict, sql.ResolveWithIgnore())
+ return u
+}
+
+// DoNothing configures the conflict_action to `DO NOTHING`.
+// Supported only by SQLite and PostgreSQL.
+func (u *AnnouncementReadUpsertOne) DoNothing() *AnnouncementReadUpsertOne {
+ u.create.conflict = append(u.create.conflict, sql.DoNothing())
+ return u
+}
+
+// Update allows overriding fields `UPDATE` values. See the AnnouncementReadCreate.OnConflict
+// documentation for more info.
+func (u *AnnouncementReadUpsertOne) Update(set func(*AnnouncementReadUpsert)) *AnnouncementReadUpsertOne {
+ u.create.conflict = append(u.create.conflict, sql.ResolveWith(func(update *sql.UpdateSet) {
+ set(&AnnouncementReadUpsert{UpdateSet: update})
+ }))
+ return u
+}
+
+// SetAnnouncementID sets the "announcement_id" field.
+func (u *AnnouncementReadUpsertOne) SetAnnouncementID(v int64) *AnnouncementReadUpsertOne {
+ return u.Update(func(s *AnnouncementReadUpsert) {
+ s.SetAnnouncementID(v)
+ })
+}
+
+// UpdateAnnouncementID sets the "announcement_id" field to the value that was provided on create.
+func (u *AnnouncementReadUpsertOne) UpdateAnnouncementID() *AnnouncementReadUpsertOne {
+ return u.Update(func(s *AnnouncementReadUpsert) {
+ s.UpdateAnnouncementID()
+ })
+}
+
+// SetUserID sets the "user_id" field.
+func (u *AnnouncementReadUpsertOne) SetUserID(v int64) *AnnouncementReadUpsertOne {
+ return u.Update(func(s *AnnouncementReadUpsert) {
+ s.SetUserID(v)
+ })
+}
+
+// UpdateUserID sets the "user_id" field to the value that was provided on create.
+func (u *AnnouncementReadUpsertOne) UpdateUserID() *AnnouncementReadUpsertOne {
+ return u.Update(func(s *AnnouncementReadUpsert) {
+ s.UpdateUserID()
+ })
+}
+
+// SetReadAt sets the "read_at" field.
+func (u *AnnouncementReadUpsertOne) SetReadAt(v time.Time) *AnnouncementReadUpsertOne {
+ return u.Update(func(s *AnnouncementReadUpsert) {
+ s.SetReadAt(v)
+ })
+}
+
+// UpdateReadAt sets the "read_at" field to the value that was provided on create.
+func (u *AnnouncementReadUpsertOne) UpdateReadAt() *AnnouncementReadUpsertOne {
+ return u.Update(func(s *AnnouncementReadUpsert) {
+ s.UpdateReadAt()
+ })
+}
+
+// Exec executes the query.
+func (u *AnnouncementReadUpsertOne) Exec(ctx context.Context) error {
+ if len(u.create.conflict) == 0 {
+ return errors.New("ent: missing options for AnnouncementReadCreate.OnConflict")
+ }
+ return u.create.Exec(ctx)
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (u *AnnouncementReadUpsertOne) ExecX(ctx context.Context) {
+ if err := u.create.Exec(ctx); err != nil {
+ panic(err)
+ }
+}
+
+// ID executes the UPSERT query and returns the inserted/updated ID.
+func (u *AnnouncementReadUpsertOne) ID(ctx context.Context) (id int64, err error) {
+ node, err := u.create.Save(ctx)
+ if err != nil {
+ return id, err
+ }
+ return node.ID, nil
+}
+
+// IDX is like ID, but panics if an error occurs.
+func (u *AnnouncementReadUpsertOne) IDX(ctx context.Context) int64 {
+ id, err := u.ID(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return id
+}
+
+// AnnouncementReadCreateBulk is the builder for creating many AnnouncementRead entities in bulk.
+type AnnouncementReadCreateBulk struct {
+ config
+ err error
+ builders []*AnnouncementReadCreate
+ conflict []sql.ConflictOption
+}
+
+// Save creates the AnnouncementRead entities in the database.
+func (_c *AnnouncementReadCreateBulk) Save(ctx context.Context) ([]*AnnouncementRead, error) {
+ if _c.err != nil {
+ return nil, _c.err
+ }
+ specs := make([]*sqlgraph.CreateSpec, len(_c.builders))
+ nodes := make([]*AnnouncementRead, len(_c.builders))
+ mutators := make([]Mutator, len(_c.builders))
+ for i := range _c.builders {
+ func(i int, root context.Context) {
+ builder := _c.builders[i]
+ builder.defaults()
+ var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
+ mutation, ok := m.(*AnnouncementReadMutation)
+ if !ok {
+ return nil, fmt.Errorf("unexpected mutation type %T", m)
+ }
+ if err := builder.check(); err != nil {
+ return nil, err
+ }
+ builder.mutation = mutation
+ var err error
+ nodes[i], specs[i] = builder.createSpec()
+ if i < len(mutators)-1 {
+ _, err = mutators[i+1].Mutate(root, _c.builders[i+1].mutation)
+ } else {
+ spec := &sqlgraph.BatchCreateSpec{Nodes: specs}
+ spec.OnConflict = _c.conflict
+ // Invoke the actual operation on the latest mutation in the chain.
+ if err = sqlgraph.BatchCreate(ctx, _c.driver, spec); err != nil {
+ if sqlgraph.IsConstraintError(err) {
+ err = &ConstraintError{msg: err.Error(), wrap: err}
+ }
+ }
+ }
+ if err != nil {
+ return nil, err
+ }
+ mutation.id = &nodes[i].ID
+ if specs[i].ID.Value != nil {
+ id := specs[i].ID.Value.(int64)
+ nodes[i].ID = int64(id)
+ }
+ mutation.done = true
+ return nodes[i], nil
+ })
+ for i := len(builder.hooks) - 1; i >= 0; i-- {
+ mut = builder.hooks[i](mut)
+ }
+ mutators[i] = mut
+ }(i, ctx)
+ }
+ if len(mutators) > 0 {
+ if _, err := mutators[0].Mutate(ctx, _c.builders[0].mutation); err != nil {
+ return nil, err
+ }
+ }
+ return nodes, nil
+}
+
+// SaveX is like Save, but panics if an error occurs.
+func (_c *AnnouncementReadCreateBulk) SaveX(ctx context.Context) []*AnnouncementRead {
+ v, err := _c.Save(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return v
+}
+
+// Exec executes the query.
+func (_c *AnnouncementReadCreateBulk) Exec(ctx context.Context) error {
+ _, err := _c.Save(ctx)
+ return err
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (_c *AnnouncementReadCreateBulk) ExecX(ctx context.Context) {
+ if err := _c.Exec(ctx); err != nil {
+ panic(err)
+ }
+}
+
+// OnConflict allows configuring the `ON CONFLICT` / `ON DUPLICATE KEY` clause
+// of the `INSERT` statement. For example:
+//
+// client.AnnouncementRead.CreateBulk(builders...).
+// OnConflict(
+// // Update the row with the new values
+//	// that was proposed for insertion.
+// sql.ResolveWithNewValues(),
+// ).
+// // Override some of the fields with custom
+// // update values.
+// Update(func(u *ent.AnnouncementReadUpsert) {
+// SetAnnouncementID(v+v).
+// }).
+// Exec(ctx)
+func (_c *AnnouncementReadCreateBulk) OnConflict(opts ...sql.ConflictOption) *AnnouncementReadUpsertBulk {
+ _c.conflict = opts
+ return &AnnouncementReadUpsertBulk{
+ create: _c,
+ }
+}
+
+// OnConflictColumns calls `OnConflict` and configures the columns
+// as conflict target. Using this option is equivalent to using:
+//
+// client.AnnouncementRead.Create().
+// OnConflict(sql.ConflictColumns(columns...)).
+// Exec(ctx)
+func (_c *AnnouncementReadCreateBulk) OnConflictColumns(columns ...string) *AnnouncementReadUpsertBulk {
+ _c.conflict = append(_c.conflict, sql.ConflictColumns(columns...))
+ return &AnnouncementReadUpsertBulk{
+ create: _c,
+ }
+}
+
+// AnnouncementReadUpsertBulk is the builder for "upsert"-ing
+// a bulk of AnnouncementRead nodes.
+type AnnouncementReadUpsertBulk struct {
+ create *AnnouncementReadCreateBulk
+}
+
+// UpdateNewValues updates the mutable fields using the new values that
+// were set on create. Using this option is equivalent to using:
+//
+// client.AnnouncementRead.Create().
+// OnConflict(
+// sql.ResolveWithNewValues(),
+// ).
+// Exec(ctx)
+func (u *AnnouncementReadUpsertBulk) UpdateNewValues() *AnnouncementReadUpsertBulk {
+ u.create.conflict = append(u.create.conflict, sql.ResolveWithNewValues())
+ u.create.conflict = append(u.create.conflict, sql.ResolveWith(func(s *sql.UpdateSet) {
+ for _, b := range u.create.builders {
+ if _, exists := b.mutation.CreatedAt(); exists {
+ s.SetIgnore(announcementread.FieldCreatedAt)
+ }
+ }
+ }))
+ return u
+}
+
+// Ignore sets each column to itself in case of conflict.
+// Using this option is equivalent to using:
+//
+// client.AnnouncementRead.Create().
+// OnConflict(sql.ResolveWithIgnore()).
+// Exec(ctx)
+func (u *AnnouncementReadUpsertBulk) Ignore() *AnnouncementReadUpsertBulk {
+ u.create.conflict = append(u.create.conflict, sql.ResolveWithIgnore())
+ return u
+}
+
+// DoNothing configures the conflict_action to `DO NOTHING`.
+// Supported only by SQLite and PostgreSQL.
+func (u *AnnouncementReadUpsertBulk) DoNothing() *AnnouncementReadUpsertBulk {
+ u.create.conflict = append(u.create.conflict, sql.DoNothing())
+ return u
+}
+
+// Update allows overriding fields `UPDATE` values. See the AnnouncementReadCreateBulk.OnConflict
+// documentation for more info.
+func (u *AnnouncementReadUpsertBulk) Update(set func(*AnnouncementReadUpsert)) *AnnouncementReadUpsertBulk {
+ u.create.conflict = append(u.create.conflict, sql.ResolveWith(func(update *sql.UpdateSet) {
+ set(&AnnouncementReadUpsert{UpdateSet: update})
+ }))
+ return u
+}
+
+// SetAnnouncementID sets the "announcement_id" field.
+func (u *AnnouncementReadUpsertBulk) SetAnnouncementID(v int64) *AnnouncementReadUpsertBulk {
+ return u.Update(func(s *AnnouncementReadUpsert) {
+ s.SetAnnouncementID(v)
+ })
+}
+
+// UpdateAnnouncementID sets the "announcement_id" field to the value that was provided on create.
+func (u *AnnouncementReadUpsertBulk) UpdateAnnouncementID() *AnnouncementReadUpsertBulk {
+ return u.Update(func(s *AnnouncementReadUpsert) {
+ s.UpdateAnnouncementID()
+ })
+}
+
+// SetUserID sets the "user_id" field.
+func (u *AnnouncementReadUpsertBulk) SetUserID(v int64) *AnnouncementReadUpsertBulk {
+ return u.Update(func(s *AnnouncementReadUpsert) {
+ s.SetUserID(v)
+ })
+}
+
+// UpdateUserID sets the "user_id" field to the value that was provided on create.
+func (u *AnnouncementReadUpsertBulk) UpdateUserID() *AnnouncementReadUpsertBulk {
+ return u.Update(func(s *AnnouncementReadUpsert) {
+ s.UpdateUserID()
+ })
+}
+
+// SetReadAt sets the "read_at" field.
+func (u *AnnouncementReadUpsertBulk) SetReadAt(v time.Time) *AnnouncementReadUpsertBulk {
+ return u.Update(func(s *AnnouncementReadUpsert) {
+ s.SetReadAt(v)
+ })
+}
+
+// UpdateReadAt sets the "read_at" field to the value that was provided on create.
+func (u *AnnouncementReadUpsertBulk) UpdateReadAt() *AnnouncementReadUpsertBulk {
+ return u.Update(func(s *AnnouncementReadUpsert) {
+ s.UpdateReadAt()
+ })
+}
+
+// Exec executes the query.
+func (u *AnnouncementReadUpsertBulk) Exec(ctx context.Context) error {
+ if u.create.err != nil {
+ return u.create.err
+ }
+ for i, b := range u.create.builders {
+ if len(b.conflict) != 0 {
+ return fmt.Errorf("ent: OnConflict was set for builder %d. Set it on the AnnouncementReadCreateBulk instead", i)
+ }
+ }
+ if len(u.create.conflict) == 0 {
+ return errors.New("ent: missing options for AnnouncementReadCreateBulk.OnConflict")
+ }
+ return u.create.Exec(ctx)
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (u *AnnouncementReadUpsertBulk) ExecX(ctx context.Context) {
+ if err := u.create.Exec(ctx); err != nil {
+ panic(err)
+ }
+}
diff --git a/backend/ent/announcementread_delete.go b/backend/ent/announcementread_delete.go
new file mode 100644
index 00000000..a4da0821
--- /dev/null
+++ b/backend/ent/announcementread_delete.go
@@ -0,0 +1,88 @@
+// Code generated by ent, DO NOT EDIT.
+
+package ent
+
+import (
+ "context"
+
+ "entgo.io/ent/dialect/sql"
+ "entgo.io/ent/dialect/sql/sqlgraph"
+ "entgo.io/ent/schema/field"
+ "github.com/Wei-Shaw/sub2api/ent/announcementread"
+ "github.com/Wei-Shaw/sub2api/ent/predicate"
+)
+
+// AnnouncementReadDelete is the builder for deleting a AnnouncementRead entity.
+type AnnouncementReadDelete struct {
+ config
+ hooks []Hook
+ mutation *AnnouncementReadMutation
+}
+
+// Where appends a list of predicates to the AnnouncementReadDelete builder.
+func (_d *AnnouncementReadDelete) Where(ps ...predicate.AnnouncementRead) *AnnouncementReadDelete {
+ _d.mutation.Where(ps...)
+ return _d
+}
+
+// Exec executes the deletion query and returns how many vertices were deleted.
+func (_d *AnnouncementReadDelete) Exec(ctx context.Context) (int, error) {
+ return withHooks(ctx, _d.sqlExec, _d.mutation, _d.hooks)
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (_d *AnnouncementReadDelete) ExecX(ctx context.Context) int {
+ n, err := _d.Exec(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return n
+}
+
+func (_d *AnnouncementReadDelete) sqlExec(ctx context.Context) (int, error) {
+ _spec := sqlgraph.NewDeleteSpec(announcementread.Table, sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64))
+ if ps := _d.mutation.predicates; len(ps) > 0 {
+ _spec.Predicate = func(selector *sql.Selector) {
+ for i := range ps {
+ ps[i](selector)
+ }
+ }
+ }
+ affected, err := sqlgraph.DeleteNodes(ctx, _d.driver, _spec)
+ if err != nil && sqlgraph.IsConstraintError(err) {
+ err = &ConstraintError{msg: err.Error(), wrap: err}
+ }
+ _d.mutation.done = true
+ return affected, err
+}
+
+// AnnouncementReadDeleteOne is the builder for deleting a single AnnouncementRead entity.
+type AnnouncementReadDeleteOne struct {
+ _d *AnnouncementReadDelete
+}
+
+// Where appends a list of predicates to the AnnouncementReadDelete builder.
+func (_d *AnnouncementReadDeleteOne) Where(ps ...predicate.AnnouncementRead) *AnnouncementReadDeleteOne {
+ _d._d.mutation.Where(ps...)
+ return _d
+}
+
+// Exec executes the deletion query.
+func (_d *AnnouncementReadDeleteOne) Exec(ctx context.Context) error {
+ n, err := _d._d.Exec(ctx)
+ switch {
+ case err != nil:
+ return err
+ case n == 0:
+ return &NotFoundError{announcementread.Label}
+ default:
+ return nil
+ }
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (_d *AnnouncementReadDeleteOne) ExecX(ctx context.Context) {
+ if err := _d.Exec(ctx); err != nil {
+ panic(err)
+ }
+}
diff --git a/backend/ent/announcementread_query.go b/backend/ent/announcementread_query.go
new file mode 100644
index 00000000..108299fd
--- /dev/null
+++ b/backend/ent/announcementread_query.go
@@ -0,0 +1,718 @@
+// Code generated by ent, DO NOT EDIT.
+
+package ent
+
+import (
+ "context"
+ "fmt"
+ "math"
+
+ "entgo.io/ent"
+ "entgo.io/ent/dialect"
+ "entgo.io/ent/dialect/sql"
+ "entgo.io/ent/dialect/sql/sqlgraph"
+ "entgo.io/ent/schema/field"
+ "github.com/Wei-Shaw/sub2api/ent/announcement"
+ "github.com/Wei-Shaw/sub2api/ent/announcementread"
+ "github.com/Wei-Shaw/sub2api/ent/predicate"
+ "github.com/Wei-Shaw/sub2api/ent/user"
+)
+
+// AnnouncementReadQuery is the builder for querying AnnouncementRead entities.
+type AnnouncementReadQuery struct {
+ config
+ ctx *QueryContext
+ order []announcementread.OrderOption
+ inters []Interceptor
+ predicates []predicate.AnnouncementRead
+ withAnnouncement *AnnouncementQuery
+ withUser *UserQuery
+ modifiers []func(*sql.Selector)
+ // intermediate query (i.e. traversal path).
+ sql *sql.Selector
+ path func(context.Context) (*sql.Selector, error)
+}
+
+// Where adds a new predicate for the AnnouncementReadQuery builder.
+func (_q *AnnouncementReadQuery) Where(ps ...predicate.AnnouncementRead) *AnnouncementReadQuery {
+ _q.predicates = append(_q.predicates, ps...)
+ return _q
+}
+
+// Limit the number of records to be returned by this query.
+func (_q *AnnouncementReadQuery) Limit(limit int) *AnnouncementReadQuery {
+ _q.ctx.Limit = &limit
+ return _q
+}
+
+// Offset to start from.
+func (_q *AnnouncementReadQuery) Offset(offset int) *AnnouncementReadQuery {
+ _q.ctx.Offset = &offset
+ return _q
+}
+
+// Unique configures the query builder to filter duplicate records on query.
+// By default, unique is set to true, and can be disabled using this method.
+func (_q *AnnouncementReadQuery) Unique(unique bool) *AnnouncementReadQuery {
+ _q.ctx.Unique = &unique
+ return _q
+}
+
+// Order specifies how the records should be ordered.
+func (_q *AnnouncementReadQuery) Order(o ...announcementread.OrderOption) *AnnouncementReadQuery {
+ _q.order = append(_q.order, o...)
+ return _q
+}
+
+// QueryAnnouncement chains the current query on the "announcement" edge.
+func (_q *AnnouncementReadQuery) QueryAnnouncement() *AnnouncementQuery {
+ query := (&AnnouncementClient{config: _q.config}).Query()
+ query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
+ if err := _q.prepareQuery(ctx); err != nil {
+ return nil, err
+ }
+ selector := _q.sqlQuery(ctx)
+ if err := selector.Err(); err != nil {
+ return nil, err
+ }
+ step := sqlgraph.NewStep(
+ sqlgraph.From(announcementread.Table, announcementread.FieldID, selector),
+ sqlgraph.To(announcement.Table, announcement.FieldID),
+ sqlgraph.Edge(sqlgraph.M2O, true, announcementread.AnnouncementTable, announcementread.AnnouncementColumn),
+ )
+ fromU = sqlgraph.SetNeighbors(_q.driver.Dialect(), step)
+ return fromU, nil
+ }
+ return query
+}
+
+// QueryUser chains the current query on the "user" edge.
+func (_q *AnnouncementReadQuery) QueryUser() *UserQuery {
+ query := (&UserClient{config: _q.config}).Query()
+ query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
+ if err := _q.prepareQuery(ctx); err != nil {
+ return nil, err
+ }
+ selector := _q.sqlQuery(ctx)
+ if err := selector.Err(); err != nil {
+ return nil, err
+ }
+ step := sqlgraph.NewStep(
+ sqlgraph.From(announcementread.Table, announcementread.FieldID, selector),
+ sqlgraph.To(user.Table, user.FieldID),
+ sqlgraph.Edge(sqlgraph.M2O, true, announcementread.UserTable, announcementread.UserColumn),
+ )
+ fromU = sqlgraph.SetNeighbors(_q.driver.Dialect(), step)
+ return fromU, nil
+ }
+ return query
+}
+
+// First returns the first AnnouncementRead entity from the query.
+// Returns a *NotFoundError when no AnnouncementRead was found.
+func (_q *AnnouncementReadQuery) First(ctx context.Context) (*AnnouncementRead, error) {
+ nodes, err := _q.Limit(1).All(setContextOp(ctx, _q.ctx, ent.OpQueryFirst))
+ if err != nil {
+ return nil, err
+ }
+ if len(nodes) == 0 {
+ return nil, &NotFoundError{announcementread.Label}
+ }
+ return nodes[0], nil
+}
+
+// FirstX is like First, but panics if an error occurs.
+func (_q *AnnouncementReadQuery) FirstX(ctx context.Context) *AnnouncementRead {
+ node, err := _q.First(ctx)
+ if err != nil && !IsNotFound(err) {
+ panic(err)
+ }
+ return node
+}
+
+// FirstID returns the first AnnouncementRead ID from the query.
+// Returns a *NotFoundError when no AnnouncementRead ID was found.
+func (_q *AnnouncementReadQuery) FirstID(ctx context.Context) (id int64, err error) {
+ var ids []int64
+ if ids, err = _q.Limit(1).IDs(setContextOp(ctx, _q.ctx, ent.OpQueryFirstID)); err != nil {
+ return
+ }
+ if len(ids) == 0 {
+ err = &NotFoundError{announcementread.Label}
+ return
+ }
+ return ids[0], nil
+}
+
+// FirstIDX is like FirstID, but panics if an error occurs.
+func (_q *AnnouncementReadQuery) FirstIDX(ctx context.Context) int64 {
+ id, err := _q.FirstID(ctx)
+ if err != nil && !IsNotFound(err) {
+ panic(err)
+ }
+ return id
+}
+
+// Only returns a single AnnouncementRead entity found by the query, ensuring it only returns one.
+// Returns a *NotSingularError when more than one AnnouncementRead entity is found.
+// Returns a *NotFoundError when no AnnouncementRead entities are found.
+func (_q *AnnouncementReadQuery) Only(ctx context.Context) (*AnnouncementRead, error) {
+ nodes, err := _q.Limit(2).All(setContextOp(ctx, _q.ctx, ent.OpQueryOnly))
+ if err != nil {
+ return nil, err
+ }
+ switch len(nodes) {
+ case 1:
+ return nodes[0], nil
+ case 0:
+ return nil, &NotFoundError{announcementread.Label}
+ default:
+ return nil, &NotSingularError{announcementread.Label}
+ }
+}
+
+// OnlyX is like Only, but panics if an error occurs.
+func (_q *AnnouncementReadQuery) OnlyX(ctx context.Context) *AnnouncementRead {
+ node, err := _q.Only(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return node
+}
+
+// OnlyID is like Only, but returns the only AnnouncementRead ID in the query.
+// Returns a *NotSingularError when more than one AnnouncementRead ID is found.
+// Returns a *NotFoundError when no entities are found.
+func (_q *AnnouncementReadQuery) OnlyID(ctx context.Context) (id int64, err error) {
+ var ids []int64
+ if ids, err = _q.Limit(2).IDs(setContextOp(ctx, _q.ctx, ent.OpQueryOnlyID)); err != nil {
+ return
+ }
+ switch len(ids) {
+ case 1:
+ id = ids[0]
+ case 0:
+ err = &NotFoundError{announcementread.Label}
+ default:
+ err = &NotSingularError{announcementread.Label}
+ }
+ return
+}
+
+// OnlyIDX is like OnlyID, but panics if an error occurs.
+func (_q *AnnouncementReadQuery) OnlyIDX(ctx context.Context) int64 {
+ id, err := _q.OnlyID(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return id
+}
+
+// All executes the query and returns a list of AnnouncementReads.
+func (_q *AnnouncementReadQuery) All(ctx context.Context) ([]*AnnouncementRead, error) {
+ ctx = setContextOp(ctx, _q.ctx, ent.OpQueryAll)
+ if err := _q.prepareQuery(ctx); err != nil {
+ return nil, err
+ }
+ qr := querierAll[[]*AnnouncementRead, *AnnouncementReadQuery]()
+ return withInterceptors[[]*AnnouncementRead](ctx, _q, qr, _q.inters)
+}
+
+// AllX is like All, but panics if an error occurs.
+func (_q *AnnouncementReadQuery) AllX(ctx context.Context) []*AnnouncementRead {
+ nodes, err := _q.All(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return nodes
+}
+
+// IDs executes the query and returns a list of AnnouncementRead IDs.
+func (_q *AnnouncementReadQuery) IDs(ctx context.Context) (ids []int64, err error) {
+ if _q.ctx.Unique == nil && _q.path != nil {
+ _q.Unique(true)
+ }
+ ctx = setContextOp(ctx, _q.ctx, ent.OpQueryIDs)
+ if err = _q.Select(announcementread.FieldID).Scan(ctx, &ids); err != nil {
+ return nil, err
+ }
+ return ids, nil
+}
+
+// IDsX is like IDs, but panics if an error occurs.
+func (_q *AnnouncementReadQuery) IDsX(ctx context.Context) []int64 {
+ ids, err := _q.IDs(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return ids
+}
+
+// Count returns the count of the given query.
+func (_q *AnnouncementReadQuery) Count(ctx context.Context) (int, error) {
+ ctx = setContextOp(ctx, _q.ctx, ent.OpQueryCount)
+ if err := _q.prepareQuery(ctx); err != nil {
+ return 0, err
+ }
+ return withInterceptors[int](ctx, _q, querierCount[*AnnouncementReadQuery](), _q.inters)
+}
+
+// CountX is like Count, but panics if an error occurs.
+func (_q *AnnouncementReadQuery) CountX(ctx context.Context) int {
+ count, err := _q.Count(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return count
+}
+
+// Exist returns true if the query has elements in the graph.
+func (_q *AnnouncementReadQuery) Exist(ctx context.Context) (bool, error) {
+ ctx = setContextOp(ctx, _q.ctx, ent.OpQueryExist)
+ switch _, err := _q.FirstID(ctx); {
+ case IsNotFound(err):
+ return false, nil
+ case err != nil:
+ return false, fmt.Errorf("ent: check existence: %w", err)
+ default:
+ return true, nil
+ }
+}
+
+// ExistX is like Exist, but panics if an error occurs.
+func (_q *AnnouncementReadQuery) ExistX(ctx context.Context) bool {
+ exist, err := _q.Exist(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return exist
+}
+
+// Clone returns a duplicate of the AnnouncementReadQuery builder, including all associated steps. It can be
+// used to prepare common query builders and use them differently after the clone is made.
+func (_q *AnnouncementReadQuery) Clone() *AnnouncementReadQuery {
+ if _q == nil {
+ return nil
+ }
+ return &AnnouncementReadQuery{
+ config: _q.config,
+ ctx: _q.ctx.Clone(),
+ order: append([]announcementread.OrderOption{}, _q.order...),
+ inters: append([]Interceptor{}, _q.inters...),
+ predicates: append([]predicate.AnnouncementRead{}, _q.predicates...),
+ withAnnouncement: _q.withAnnouncement.Clone(),
+ withUser: _q.withUser.Clone(),
+ // clone intermediate query.
+ sql: _q.sql.Clone(),
+ path: _q.path,
+ }
+}
+
+// WithAnnouncement tells the query-builder to eager-load the nodes that are connected to
+// the "announcement" edge. The optional arguments are used to configure the query builder of the edge.
+func (_q *AnnouncementReadQuery) WithAnnouncement(opts ...func(*AnnouncementQuery)) *AnnouncementReadQuery {
+ query := (&AnnouncementClient{config: _q.config}).Query()
+ for _, opt := range opts {
+ opt(query)
+ }
+ _q.withAnnouncement = query
+ return _q
+}
+
+// WithUser tells the query-builder to eager-load the nodes that are connected to
+// the "user" edge. The optional arguments are used to configure the query builder of the edge.
+func (_q *AnnouncementReadQuery) WithUser(opts ...func(*UserQuery)) *AnnouncementReadQuery {
+ query := (&UserClient{config: _q.config}).Query()
+ for _, opt := range opts {
+ opt(query)
+ }
+ _q.withUser = query
+ return _q
+}
+
+// GroupBy is used to group vertices by one or more fields/columns.
+// It is often used with aggregate functions, like: count, max, mean, min, sum.
+//
+// Example:
+//
+// var v []struct {
+// AnnouncementID int64 `json:"announcement_id,omitempty"`
+// Count int `json:"count,omitempty"`
+// }
+//
+// client.AnnouncementRead.Query().
+// GroupBy(announcementread.FieldAnnouncementID).
+// Aggregate(ent.Count()).
+// Scan(ctx, &v)
+func (_q *AnnouncementReadQuery) GroupBy(field string, fields ...string) *AnnouncementReadGroupBy {
+ _q.ctx.Fields = append([]string{field}, fields...)
+ grbuild := &AnnouncementReadGroupBy{build: _q}
+ grbuild.flds = &_q.ctx.Fields
+ grbuild.label = announcementread.Label
+ grbuild.scan = grbuild.Scan
+ return grbuild
+}
+
+// Select allows the selection one or more fields/columns for the given query,
+// instead of selecting all fields in the entity.
+//
+// Example:
+//
+// var v []struct {
+// AnnouncementID int64 `json:"announcement_id,omitempty"`
+// }
+//
+// client.AnnouncementRead.Query().
+// Select(announcementread.FieldAnnouncementID).
+// Scan(ctx, &v)
+func (_q *AnnouncementReadQuery) Select(fields ...string) *AnnouncementReadSelect {
+ _q.ctx.Fields = append(_q.ctx.Fields, fields...)
+ sbuild := &AnnouncementReadSelect{AnnouncementReadQuery: _q}
+ sbuild.label = announcementread.Label
+ sbuild.flds, sbuild.scan = &_q.ctx.Fields, sbuild.Scan
+ return sbuild
+}
+
+// Aggregate returns a AnnouncementReadSelect configured with the given aggregations.
+func (_q *AnnouncementReadQuery) Aggregate(fns ...AggregateFunc) *AnnouncementReadSelect {
+ return _q.Select().Aggregate(fns...)
+}
+
+func (_q *AnnouncementReadQuery) prepareQuery(ctx context.Context) error {
+ for _, inter := range _q.inters {
+ if inter == nil {
+ return fmt.Errorf("ent: uninitialized interceptor (forgotten import ent/runtime?)")
+ }
+ if trv, ok := inter.(Traverser); ok {
+ if err := trv.Traverse(ctx, _q); err != nil {
+ return err
+ }
+ }
+ }
+ for _, f := range _q.ctx.Fields {
+ if !announcementread.ValidColumn(f) {
+ return &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
+ }
+ }
+ if _q.path != nil {
+ prev, err := _q.path(ctx)
+ if err != nil {
+ return err
+ }
+ _q.sql = prev
+ }
+ return nil
+}
+
+func (_q *AnnouncementReadQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*AnnouncementRead, error) {
+ var (
+ nodes = []*AnnouncementRead{}
+ _spec = _q.querySpec()
+ loadedTypes = [2]bool{
+ _q.withAnnouncement != nil,
+ _q.withUser != nil,
+ }
+ )
+ _spec.ScanValues = func(columns []string) ([]any, error) {
+ return (*AnnouncementRead).scanValues(nil, columns)
+ }
+ _spec.Assign = func(columns []string, values []any) error {
+ node := &AnnouncementRead{config: _q.config}
+ nodes = append(nodes, node)
+ node.Edges.loadedTypes = loadedTypes
+ return node.assignValues(columns, values)
+ }
+ if len(_q.modifiers) > 0 {
+ _spec.Modifiers = _q.modifiers
+ }
+ for i := range hooks {
+ hooks[i](ctx, _spec)
+ }
+ if err := sqlgraph.QueryNodes(ctx, _q.driver, _spec); err != nil {
+ return nil, err
+ }
+ if len(nodes) == 0 {
+ return nodes, nil
+ }
+ if query := _q.withAnnouncement; query != nil {
+ if err := _q.loadAnnouncement(ctx, query, nodes, nil,
+ func(n *AnnouncementRead, e *Announcement) { n.Edges.Announcement = e }); err != nil {
+ return nil, err
+ }
+ }
+ if query := _q.withUser; query != nil {
+ if err := _q.loadUser(ctx, query, nodes, nil,
+ func(n *AnnouncementRead, e *User) { n.Edges.User = e }); err != nil {
+ return nil, err
+ }
+ }
+ return nodes, nil
+}
+
+func (_q *AnnouncementReadQuery) loadAnnouncement(ctx context.Context, query *AnnouncementQuery, nodes []*AnnouncementRead, init func(*AnnouncementRead), assign func(*AnnouncementRead, *Announcement)) error {
+ ids := make([]int64, 0, len(nodes))
+ nodeids := make(map[int64][]*AnnouncementRead)
+ for i := range nodes {
+ fk := nodes[i].AnnouncementID
+ if _, ok := nodeids[fk]; !ok {
+ ids = append(ids, fk)
+ }
+ nodeids[fk] = append(nodeids[fk], nodes[i])
+ }
+ if len(ids) == 0 {
+ return nil
+ }
+ query.Where(announcement.IDIn(ids...))
+ neighbors, err := query.All(ctx)
+ if err != nil {
+ return err
+ }
+ for _, n := range neighbors {
+ nodes, ok := nodeids[n.ID]
+ if !ok {
+ return fmt.Errorf(`unexpected foreign-key "announcement_id" returned %v`, n.ID)
+ }
+ for i := range nodes {
+ assign(nodes[i], n)
+ }
+ }
+ return nil
+}
+func (_q *AnnouncementReadQuery) loadUser(ctx context.Context, query *UserQuery, nodes []*AnnouncementRead, init func(*AnnouncementRead), assign func(*AnnouncementRead, *User)) error {
+ ids := make([]int64, 0, len(nodes))
+ nodeids := make(map[int64][]*AnnouncementRead)
+ for i := range nodes {
+ fk := nodes[i].UserID
+ if _, ok := nodeids[fk]; !ok {
+ ids = append(ids, fk)
+ }
+ nodeids[fk] = append(nodeids[fk], nodes[i])
+ }
+ if len(ids) == 0 {
+ return nil
+ }
+ query.Where(user.IDIn(ids...))
+ neighbors, err := query.All(ctx)
+ if err != nil {
+ return err
+ }
+ for _, n := range neighbors {
+ nodes, ok := nodeids[n.ID]
+ if !ok {
+ return fmt.Errorf(`unexpected foreign-key "user_id" returned %v`, n.ID)
+ }
+ for i := range nodes {
+ assign(nodes[i], n)
+ }
+ }
+ return nil
+}
+
+func (_q *AnnouncementReadQuery) sqlCount(ctx context.Context) (int, error) {
+ _spec := _q.querySpec()
+ if len(_q.modifiers) > 0 {
+ _spec.Modifiers = _q.modifiers
+ }
+ _spec.Node.Columns = _q.ctx.Fields
+ if len(_q.ctx.Fields) > 0 {
+ _spec.Unique = _q.ctx.Unique != nil && *_q.ctx.Unique
+ }
+ return sqlgraph.CountNodes(ctx, _q.driver, _spec)
+}
+
+func (_q *AnnouncementReadQuery) querySpec() *sqlgraph.QuerySpec {
+ _spec := sqlgraph.NewQuerySpec(announcementread.Table, announcementread.Columns, sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64))
+ _spec.From = _q.sql
+ if unique := _q.ctx.Unique; unique != nil {
+ _spec.Unique = *unique
+ } else if _q.path != nil {
+ _spec.Unique = true
+ }
+ if fields := _q.ctx.Fields; len(fields) > 0 {
+ _spec.Node.Columns = make([]string, 0, len(fields))
+ _spec.Node.Columns = append(_spec.Node.Columns, announcementread.FieldID)
+ for i := range fields {
+ if fields[i] != announcementread.FieldID {
+ _spec.Node.Columns = append(_spec.Node.Columns, fields[i])
+ }
+ }
+ if _q.withAnnouncement != nil {
+ _spec.Node.AddColumnOnce(announcementread.FieldAnnouncementID)
+ }
+ if _q.withUser != nil {
+ _spec.Node.AddColumnOnce(announcementread.FieldUserID)
+ }
+ }
+ if ps := _q.predicates; len(ps) > 0 {
+ _spec.Predicate = func(selector *sql.Selector) {
+ for i := range ps {
+ ps[i](selector)
+ }
+ }
+ }
+ if limit := _q.ctx.Limit; limit != nil {
+ _spec.Limit = *limit
+ }
+ if offset := _q.ctx.Offset; offset != nil {
+ _spec.Offset = *offset
+ }
+ if ps := _q.order; len(ps) > 0 {
+ _spec.Order = func(selector *sql.Selector) {
+ for i := range ps {
+ ps[i](selector)
+ }
+ }
+ }
+ return _spec
+}
+
+func (_q *AnnouncementReadQuery) sqlQuery(ctx context.Context) *sql.Selector {
+ builder := sql.Dialect(_q.driver.Dialect())
+ t1 := builder.Table(announcementread.Table)
+ columns := _q.ctx.Fields
+ if len(columns) == 0 {
+ columns = announcementread.Columns
+ }
+ selector := builder.Select(t1.Columns(columns...)...).From(t1)
+ if _q.sql != nil {
+ selector = _q.sql
+ selector.Select(selector.Columns(columns...)...)
+ }
+ if _q.ctx.Unique != nil && *_q.ctx.Unique {
+ selector.Distinct()
+ }
+ for _, m := range _q.modifiers {
+ m(selector)
+ }
+ for _, p := range _q.predicates {
+ p(selector)
+ }
+ for _, p := range _q.order {
+ p(selector)
+ }
+ if offset := _q.ctx.Offset; offset != nil {
+ // limit is mandatory for offset clause. We start
+ // with default value, and override it below if needed.
+ selector.Offset(*offset).Limit(math.MaxInt32)
+ }
+ if limit := _q.ctx.Limit; limit != nil {
+ selector.Limit(*limit)
+ }
+ return selector
+}
+
+// ForUpdate locks the selected rows against concurrent updates, and prevent them from being
+// updated, deleted or "selected ... for update" by other sessions, until the transaction is
+// either committed or rolled-back.
+func (_q *AnnouncementReadQuery) ForUpdate(opts ...sql.LockOption) *AnnouncementReadQuery {
+ if _q.driver.Dialect() == dialect.Postgres {
+ _q.Unique(false)
+ }
+ _q.modifiers = append(_q.modifiers, func(s *sql.Selector) {
+ s.ForUpdate(opts...)
+ })
+ return _q
+}
+
+// ForShare behaves similarly to ForUpdate, except that it acquires a shared mode lock
+// on any rows that are read. Other sessions can read the rows, but cannot modify them
+// until your transaction commits.
+func (_q *AnnouncementReadQuery) ForShare(opts ...sql.LockOption) *AnnouncementReadQuery {
+ if _q.driver.Dialect() == dialect.Postgres {
+ _q.Unique(false)
+ }
+ _q.modifiers = append(_q.modifiers, func(s *sql.Selector) {
+ s.ForShare(opts...)
+ })
+ return _q
+}
+
+// AnnouncementReadGroupBy is the group-by builder for AnnouncementRead entities.
+type AnnouncementReadGroupBy struct {
+ selector
+ build *AnnouncementReadQuery
+}
+
+// Aggregate adds the given aggregation functions to the group-by query.
+func (_g *AnnouncementReadGroupBy) Aggregate(fns ...AggregateFunc) *AnnouncementReadGroupBy {
+ _g.fns = append(_g.fns, fns...)
+ return _g
+}
+
+// Scan applies the selector query and scans the result into the given value.
+func (_g *AnnouncementReadGroupBy) Scan(ctx context.Context, v any) error {
+ ctx = setContextOp(ctx, _g.build.ctx, ent.OpQueryGroupBy)
+ if err := _g.build.prepareQuery(ctx); err != nil {
+ return err
+ }
+ return scanWithInterceptors[*AnnouncementReadQuery, *AnnouncementReadGroupBy](ctx, _g.build, _g, _g.build.inters, v)
+}
+
+func (_g *AnnouncementReadGroupBy) sqlScan(ctx context.Context, root *AnnouncementReadQuery, v any) error {
+ selector := root.sqlQuery(ctx).Select()
+ aggregation := make([]string, 0, len(_g.fns))
+ for _, fn := range _g.fns {
+ aggregation = append(aggregation, fn(selector))
+ }
+ if len(selector.SelectedColumns()) == 0 {
+ columns := make([]string, 0, len(*_g.flds)+len(_g.fns))
+ for _, f := range *_g.flds {
+ columns = append(columns, selector.C(f))
+ }
+ columns = append(columns, aggregation...)
+ selector.Select(columns...)
+ }
+ selector.GroupBy(selector.Columns(*_g.flds...)...)
+ if err := selector.Err(); err != nil {
+ return err
+ }
+ rows := &sql.Rows{}
+ query, args := selector.Query()
+ if err := _g.build.driver.Query(ctx, query, args, rows); err != nil {
+ return err
+ }
+ defer rows.Close()
+ return sql.ScanSlice(rows, v)
+}
+
+// AnnouncementReadSelect is the builder for selecting fields of AnnouncementRead entities.
+type AnnouncementReadSelect struct {
+ *AnnouncementReadQuery
+ selector
+}
+
+// Aggregate adds the given aggregation functions to the selector query.
+func (_s *AnnouncementReadSelect) Aggregate(fns ...AggregateFunc) *AnnouncementReadSelect {
+ _s.fns = append(_s.fns, fns...)
+ return _s
+}
+
+// Scan applies the selector query and scans the result into the given value.
+func (_s *AnnouncementReadSelect) Scan(ctx context.Context, v any) error {
+ ctx = setContextOp(ctx, _s.ctx, ent.OpQuerySelect)
+ if err := _s.prepareQuery(ctx); err != nil {
+ return err
+ }
+ return scanWithInterceptors[*AnnouncementReadQuery, *AnnouncementReadSelect](ctx, _s.AnnouncementReadQuery, _s, _s.inters, v)
+}
+
+func (_s *AnnouncementReadSelect) sqlScan(ctx context.Context, root *AnnouncementReadQuery, v any) error {
+ selector := root.sqlQuery(ctx)
+ aggregation := make([]string, 0, len(_s.fns))
+ for _, fn := range _s.fns {
+ aggregation = append(aggregation, fn(selector))
+ }
+ switch n := len(*_s.selector.flds); {
+ case n == 0 && len(aggregation) > 0:
+ selector.Select(aggregation...)
+ case n != 0 && len(aggregation) > 0:
+ selector.AppendSelect(aggregation...)
+ }
+ rows := &sql.Rows{}
+ query, args := selector.Query()
+ if err := _s.driver.Query(ctx, query, args, rows); err != nil {
+ return err
+ }
+ defer rows.Close()
+ return sql.ScanSlice(rows, v)
+}
diff --git a/backend/ent/announcementread_update.go b/backend/ent/announcementread_update.go
new file mode 100644
index 00000000..55a4eef8
--- /dev/null
+++ b/backend/ent/announcementread_update.go
@@ -0,0 +1,456 @@
+// Code generated by ent, DO NOT EDIT.
+
+package ent
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "time"
+
+ "entgo.io/ent/dialect/sql"
+ "entgo.io/ent/dialect/sql/sqlgraph"
+ "entgo.io/ent/schema/field"
+ "github.com/Wei-Shaw/sub2api/ent/announcement"
+ "github.com/Wei-Shaw/sub2api/ent/announcementread"
+ "github.com/Wei-Shaw/sub2api/ent/predicate"
+ "github.com/Wei-Shaw/sub2api/ent/user"
+)
+
+// AnnouncementReadUpdate is the builder for updating AnnouncementRead entities.
+type AnnouncementReadUpdate struct {
+ config
+ hooks []Hook
+ mutation *AnnouncementReadMutation
+}
+
+// Where appends a list predicates to the AnnouncementReadUpdate builder.
+func (_u *AnnouncementReadUpdate) Where(ps ...predicate.AnnouncementRead) *AnnouncementReadUpdate {
+ _u.mutation.Where(ps...)
+ return _u
+}
+
+// SetAnnouncementID sets the "announcement_id" field.
+func (_u *AnnouncementReadUpdate) SetAnnouncementID(v int64) *AnnouncementReadUpdate {
+ _u.mutation.SetAnnouncementID(v)
+ return _u
+}
+
+// SetNillableAnnouncementID sets the "announcement_id" field if the given value is not nil.
+func (_u *AnnouncementReadUpdate) SetNillableAnnouncementID(v *int64) *AnnouncementReadUpdate {
+ if v != nil {
+ _u.SetAnnouncementID(*v)
+ }
+ return _u
+}
+
+// SetUserID sets the "user_id" field.
+func (_u *AnnouncementReadUpdate) SetUserID(v int64) *AnnouncementReadUpdate {
+ _u.mutation.SetUserID(v)
+ return _u
+}
+
+// SetNillableUserID sets the "user_id" field if the given value is not nil.
+func (_u *AnnouncementReadUpdate) SetNillableUserID(v *int64) *AnnouncementReadUpdate {
+ if v != nil {
+ _u.SetUserID(*v)
+ }
+ return _u
+}
+
+// SetReadAt sets the "read_at" field.
+func (_u *AnnouncementReadUpdate) SetReadAt(v time.Time) *AnnouncementReadUpdate {
+ _u.mutation.SetReadAt(v)
+ return _u
+}
+
+// SetNillableReadAt sets the "read_at" field if the given value is not nil.
+func (_u *AnnouncementReadUpdate) SetNillableReadAt(v *time.Time) *AnnouncementReadUpdate {
+ if v != nil {
+ _u.SetReadAt(*v)
+ }
+ return _u
+}
+
+// SetAnnouncement sets the "announcement" edge to the Announcement entity.
+func (_u *AnnouncementReadUpdate) SetAnnouncement(v *Announcement) *AnnouncementReadUpdate {
+ return _u.SetAnnouncementID(v.ID)
+}
+
+// SetUser sets the "user" edge to the User entity.
+func (_u *AnnouncementReadUpdate) SetUser(v *User) *AnnouncementReadUpdate {
+ return _u.SetUserID(v.ID)
+}
+
+// Mutation returns the AnnouncementReadMutation object of the builder.
+func (_u *AnnouncementReadUpdate) Mutation() *AnnouncementReadMutation {
+ return _u.mutation
+}
+
+// ClearAnnouncement clears the "announcement" edge to the Announcement entity.
+func (_u *AnnouncementReadUpdate) ClearAnnouncement() *AnnouncementReadUpdate {
+ _u.mutation.ClearAnnouncement()
+ return _u
+}
+
+// ClearUser clears the "user" edge to the User entity.
+func (_u *AnnouncementReadUpdate) ClearUser() *AnnouncementReadUpdate {
+ _u.mutation.ClearUser()
+ return _u
+}
+
+// Save executes the query and returns the number of nodes affected by the update operation.
+func (_u *AnnouncementReadUpdate) Save(ctx context.Context) (int, error) {
+ return withHooks(ctx, _u.sqlSave, _u.mutation, _u.hooks)
+}
+
+// SaveX is like Save, but panics if an error occurs.
+func (_u *AnnouncementReadUpdate) SaveX(ctx context.Context) int {
+ affected, err := _u.Save(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return affected
+}
+
+// Exec executes the query.
+func (_u *AnnouncementReadUpdate) Exec(ctx context.Context) error {
+ _, err := _u.Save(ctx)
+ return err
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (_u *AnnouncementReadUpdate) ExecX(ctx context.Context) {
+ if err := _u.Exec(ctx); err != nil {
+ panic(err)
+ }
+}
+
+// check runs all checks and user-defined validators on the builder.
+func (_u *AnnouncementReadUpdate) check() error {
+ if _u.mutation.AnnouncementCleared() && len(_u.mutation.AnnouncementIDs()) > 0 {
+ return errors.New(`ent: clearing a required unique edge "AnnouncementRead.announcement"`)
+ }
+ if _u.mutation.UserCleared() && len(_u.mutation.UserIDs()) > 0 {
+ return errors.New(`ent: clearing a required unique edge "AnnouncementRead.user"`)
+ }
+ return nil
+}
+
+func (_u *AnnouncementReadUpdate) sqlSave(ctx context.Context) (_node int, err error) {
+ if err := _u.check(); err != nil {
+ return _node, err
+ }
+ _spec := sqlgraph.NewUpdateSpec(announcementread.Table, announcementread.Columns, sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64))
+ if ps := _u.mutation.predicates; len(ps) > 0 {
+ _spec.Predicate = func(selector *sql.Selector) {
+ for i := range ps {
+ ps[i](selector)
+ }
+ }
+ }
+ if value, ok := _u.mutation.ReadAt(); ok {
+ _spec.SetField(announcementread.FieldReadAt, field.TypeTime, value)
+ }
+ if _u.mutation.AnnouncementCleared() {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.M2O,
+ Inverse: true,
+ Table: announcementread.AnnouncementTable,
+ Columns: []string{announcementread.AnnouncementColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcement.FieldID, field.TypeInt64),
+ },
+ }
+ _spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+ }
+ if nodes := _u.mutation.AnnouncementIDs(); len(nodes) > 0 {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.M2O,
+ Inverse: true,
+ Table: announcementread.AnnouncementTable,
+ Columns: []string{announcementread.AnnouncementColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcement.FieldID, field.TypeInt64),
+ },
+ }
+ for _, k := range nodes {
+ edge.Target.Nodes = append(edge.Target.Nodes, k)
+ }
+ _spec.Edges.Add = append(_spec.Edges.Add, edge)
+ }
+ if _u.mutation.UserCleared() {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.M2O,
+ Inverse: true,
+ Table: announcementread.UserTable,
+ Columns: []string{announcementread.UserColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeInt64),
+ },
+ }
+ _spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+ }
+ if nodes := _u.mutation.UserIDs(); len(nodes) > 0 {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.M2O,
+ Inverse: true,
+ Table: announcementread.UserTable,
+ Columns: []string{announcementread.UserColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeInt64),
+ },
+ }
+ for _, k := range nodes {
+ edge.Target.Nodes = append(edge.Target.Nodes, k)
+ }
+ _spec.Edges.Add = append(_spec.Edges.Add, edge)
+ }
+ if _node, err = sqlgraph.UpdateNodes(ctx, _u.driver, _spec); err != nil {
+ if _, ok := err.(*sqlgraph.NotFoundError); ok {
+ err = &NotFoundError{announcementread.Label}
+ } else if sqlgraph.IsConstraintError(err) {
+ err = &ConstraintError{msg: err.Error(), wrap: err}
+ }
+ return 0, err
+ }
+ _u.mutation.done = true
+ return _node, nil
+}
+
+// AnnouncementReadUpdateOne is the builder for updating a single AnnouncementRead entity.
+type AnnouncementReadUpdateOne struct {
+ config
+ fields []string
+ hooks []Hook
+ mutation *AnnouncementReadMutation
+}
+
+// SetAnnouncementID sets the "announcement_id" field.
+func (_u *AnnouncementReadUpdateOne) SetAnnouncementID(v int64) *AnnouncementReadUpdateOne {
+ _u.mutation.SetAnnouncementID(v)
+ return _u
+}
+
+// SetNillableAnnouncementID sets the "announcement_id" field if the given value is not nil.
+func (_u *AnnouncementReadUpdateOne) SetNillableAnnouncementID(v *int64) *AnnouncementReadUpdateOne {
+ if v != nil {
+ _u.SetAnnouncementID(*v)
+ }
+ return _u
+}
+
+// SetUserID sets the "user_id" field.
+func (_u *AnnouncementReadUpdateOne) SetUserID(v int64) *AnnouncementReadUpdateOne {
+ _u.mutation.SetUserID(v)
+ return _u
+}
+
+// SetNillableUserID sets the "user_id" field if the given value is not nil.
+func (_u *AnnouncementReadUpdateOne) SetNillableUserID(v *int64) *AnnouncementReadUpdateOne {
+ if v != nil {
+ _u.SetUserID(*v)
+ }
+ return _u
+}
+
+// SetReadAt sets the "read_at" field.
+func (_u *AnnouncementReadUpdateOne) SetReadAt(v time.Time) *AnnouncementReadUpdateOne {
+ _u.mutation.SetReadAt(v)
+ return _u
+}
+
+// SetNillableReadAt sets the "read_at" field if the given value is not nil.
+func (_u *AnnouncementReadUpdateOne) SetNillableReadAt(v *time.Time) *AnnouncementReadUpdateOne {
+ if v != nil {
+ _u.SetReadAt(*v)
+ }
+ return _u
+}
+
+// SetAnnouncement sets the "announcement" edge to the Announcement entity.
+func (_u *AnnouncementReadUpdateOne) SetAnnouncement(v *Announcement) *AnnouncementReadUpdateOne {
+ return _u.SetAnnouncementID(v.ID)
+}
+
+// SetUser sets the "user" edge to the User entity.
+func (_u *AnnouncementReadUpdateOne) SetUser(v *User) *AnnouncementReadUpdateOne {
+ return _u.SetUserID(v.ID)
+}
+
+// Mutation returns the AnnouncementReadMutation object of the builder.
+func (_u *AnnouncementReadUpdateOne) Mutation() *AnnouncementReadMutation {
+ return _u.mutation
+}
+
+// ClearAnnouncement clears the "announcement" edge to the Announcement entity.
+func (_u *AnnouncementReadUpdateOne) ClearAnnouncement() *AnnouncementReadUpdateOne {
+ _u.mutation.ClearAnnouncement()
+ return _u
+}
+
+// ClearUser clears the "user" edge to the User entity.
+func (_u *AnnouncementReadUpdateOne) ClearUser() *AnnouncementReadUpdateOne {
+ _u.mutation.ClearUser()
+ return _u
+}
+
+// Where appends a list predicates to the AnnouncementReadUpdate builder.
+func (_u *AnnouncementReadUpdateOne) Where(ps ...predicate.AnnouncementRead) *AnnouncementReadUpdateOne {
+ _u.mutation.Where(ps...)
+ return _u
+}
+
+// Select allows selecting one or more fields (columns) of the returned entity.
+// The default is selecting all fields defined in the entity schema.
+func (_u *AnnouncementReadUpdateOne) Select(field string, fields ...string) *AnnouncementReadUpdateOne {
+ _u.fields = append([]string{field}, fields...)
+ return _u
+}
+
+// Save executes the query and returns the updated AnnouncementRead entity.
+func (_u *AnnouncementReadUpdateOne) Save(ctx context.Context) (*AnnouncementRead, error) {
+ return withHooks(ctx, _u.sqlSave, _u.mutation, _u.hooks)
+}
+
+// SaveX is like Save, but panics if an error occurs.
+func (_u *AnnouncementReadUpdateOne) SaveX(ctx context.Context) *AnnouncementRead {
+ node, err := _u.Save(ctx)
+ if err != nil {
+ panic(err)
+ }
+ return node
+}
+
+// Exec executes the query on the entity.
+func (_u *AnnouncementReadUpdateOne) Exec(ctx context.Context) error {
+ _, err := _u.Save(ctx)
+ return err
+}
+
+// ExecX is like Exec, but panics if an error occurs.
+func (_u *AnnouncementReadUpdateOne) ExecX(ctx context.Context) {
+ if err := _u.Exec(ctx); err != nil {
+ panic(err)
+ }
+}
+
+// check runs all checks and user-defined validators on the builder.
+func (_u *AnnouncementReadUpdateOne) check() error {
+ if _u.mutation.AnnouncementCleared() && len(_u.mutation.AnnouncementIDs()) > 0 {
+ return errors.New(`ent: clearing a required unique edge "AnnouncementRead.announcement"`)
+ }
+ if _u.mutation.UserCleared() && len(_u.mutation.UserIDs()) > 0 {
+ return errors.New(`ent: clearing a required unique edge "AnnouncementRead.user"`)
+ }
+ return nil
+}
+
+func (_u *AnnouncementReadUpdateOne) sqlSave(ctx context.Context) (_node *AnnouncementRead, err error) {
+ if err := _u.check(); err != nil {
+ return _node, err
+ }
+ _spec := sqlgraph.NewUpdateSpec(announcementread.Table, announcementread.Columns, sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64))
+ id, ok := _u.mutation.ID()
+ if !ok {
+ return nil, &ValidationError{Name: "id", err: errors.New(`ent: missing "AnnouncementRead.id" for update`)}
+ }
+ _spec.Node.ID.Value = id
+ if fields := _u.fields; len(fields) > 0 {
+ _spec.Node.Columns = make([]string, 0, len(fields))
+ _spec.Node.Columns = append(_spec.Node.Columns, announcementread.FieldID)
+ for _, f := range fields {
+ if !announcementread.ValidColumn(f) {
+ return nil, &ValidationError{Name: f, err: fmt.Errorf("ent: invalid field %q for query", f)}
+ }
+ if f != announcementread.FieldID {
+ _spec.Node.Columns = append(_spec.Node.Columns, f)
+ }
+ }
+ }
+ if ps := _u.mutation.predicates; len(ps) > 0 {
+ _spec.Predicate = func(selector *sql.Selector) {
+ for i := range ps {
+ ps[i](selector)
+ }
+ }
+ }
+ if value, ok := _u.mutation.ReadAt(); ok {
+ _spec.SetField(announcementread.FieldReadAt, field.TypeTime, value)
+ }
+ if _u.mutation.AnnouncementCleared() {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.M2O,
+ Inverse: true,
+ Table: announcementread.AnnouncementTable,
+ Columns: []string{announcementread.AnnouncementColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcement.FieldID, field.TypeInt64),
+ },
+ }
+ _spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+ }
+ if nodes := _u.mutation.AnnouncementIDs(); len(nodes) > 0 {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.M2O,
+ Inverse: true,
+ Table: announcementread.AnnouncementTable,
+ Columns: []string{announcementread.AnnouncementColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcement.FieldID, field.TypeInt64),
+ },
+ }
+ for _, k := range nodes {
+ edge.Target.Nodes = append(edge.Target.Nodes, k)
+ }
+ _spec.Edges.Add = append(_spec.Edges.Add, edge)
+ }
+ if _u.mutation.UserCleared() {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.M2O,
+ Inverse: true,
+ Table: announcementread.UserTable,
+ Columns: []string{announcementread.UserColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeInt64),
+ },
+ }
+ _spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+ }
+ if nodes := _u.mutation.UserIDs(); len(nodes) > 0 {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.M2O,
+ Inverse: true,
+ Table: announcementread.UserTable,
+ Columns: []string{announcementread.UserColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(user.FieldID, field.TypeInt64),
+ },
+ }
+ for _, k := range nodes {
+ edge.Target.Nodes = append(edge.Target.Nodes, k)
+ }
+ _spec.Edges.Add = append(_spec.Edges.Add, edge)
+ }
+ _node = &AnnouncementRead{config: _u.config}
+ _spec.Assign = _node.assignValues
+ _spec.ScanValues = _node.scanValues
+ if err = sqlgraph.UpdateNode(ctx, _u.driver, _spec); err != nil {
+ if _, ok := err.(*sqlgraph.NotFoundError); ok {
+ err = &NotFoundError{announcementread.Label}
+ } else if sqlgraph.IsConstraintError(err) {
+ err = &ConstraintError{msg: err.Error(), wrap: err}
+ }
+ return nil, err
+ }
+ _u.mutation.done = true
+ return _node, nil
+}
diff --git a/backend/ent/client.go b/backend/ent/client.go
index f6c13e84..a17721da 100644
--- a/backend/ent/client.go
+++ b/backend/ent/client.go
@@ -17,6 +17,8 @@ import (
"entgo.io/ent/dialect/sql/sqlgraph"
"github.com/Wei-Shaw/sub2api/ent/account"
"github.com/Wei-Shaw/sub2api/ent/accountgroup"
+ "github.com/Wei-Shaw/sub2api/ent/announcement"
+ "github.com/Wei-Shaw/sub2api/ent/announcementread"
"github.com/Wei-Shaw/sub2api/ent/apikey"
"github.com/Wei-Shaw/sub2api/ent/group"
"github.com/Wei-Shaw/sub2api/ent/promocode"
@@ -46,6 +48,10 @@ type Client struct {
Account *AccountClient
// AccountGroup is the client for interacting with the AccountGroup builders.
AccountGroup *AccountGroupClient
+ // Announcement is the client for interacting with the Announcement builders.
+ Announcement *AnnouncementClient
+ // AnnouncementRead is the client for interacting with the AnnouncementRead builders.
+ AnnouncementRead *AnnouncementReadClient
// Group is the client for interacting with the Group builders.
Group *GroupClient
// PromoCode is the client for interacting with the PromoCode builders.
@@ -86,6 +92,8 @@ func (c *Client) init() {
c.APIKey = NewAPIKeyClient(c.config)
c.Account = NewAccountClient(c.config)
c.AccountGroup = NewAccountGroupClient(c.config)
+ c.Announcement = NewAnnouncementClient(c.config)
+ c.AnnouncementRead = NewAnnouncementReadClient(c.config)
c.Group = NewGroupClient(c.config)
c.PromoCode = NewPromoCodeClient(c.config)
c.PromoCodeUsage = NewPromoCodeUsageClient(c.config)
@@ -194,6 +202,8 @@ func (c *Client) Tx(ctx context.Context) (*Tx, error) {
APIKey: NewAPIKeyClient(cfg),
Account: NewAccountClient(cfg),
AccountGroup: NewAccountGroupClient(cfg),
+ Announcement: NewAnnouncementClient(cfg),
+ AnnouncementRead: NewAnnouncementReadClient(cfg),
Group: NewGroupClient(cfg),
PromoCode: NewPromoCodeClient(cfg),
PromoCodeUsage: NewPromoCodeUsageClient(cfg),
@@ -229,6 +239,8 @@ func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error)
APIKey: NewAPIKeyClient(cfg),
Account: NewAccountClient(cfg),
AccountGroup: NewAccountGroupClient(cfg),
+ Announcement: NewAnnouncementClient(cfg),
+ AnnouncementRead: NewAnnouncementReadClient(cfg),
Group: NewGroupClient(cfg),
PromoCode: NewPromoCodeClient(cfg),
PromoCodeUsage: NewPromoCodeUsageClient(cfg),
@@ -271,10 +283,10 @@ func (c *Client) Close() error {
// In order to add hooks to a specific client, call: `client.Node.Use(...)`.
func (c *Client) Use(hooks ...Hook) {
for _, n := range []interface{ Use(...Hook) }{
- c.APIKey, c.Account, c.AccountGroup, c.Group, c.PromoCode, c.PromoCodeUsage,
- c.Proxy, c.RedeemCode, c.Setting, c.UsageCleanupTask, c.UsageLog, c.User,
- c.UserAllowedGroup, c.UserAttributeDefinition, c.UserAttributeValue,
- c.UserSubscription,
+ c.APIKey, c.Account, c.AccountGroup, c.Announcement, c.AnnouncementRead,
+ c.Group, c.PromoCode, c.PromoCodeUsage, c.Proxy, c.RedeemCode, c.Setting,
+ c.UsageCleanupTask, c.UsageLog, c.User, c.UserAllowedGroup,
+ c.UserAttributeDefinition, c.UserAttributeValue, c.UserSubscription,
} {
n.Use(hooks...)
}
@@ -284,10 +296,10 @@ func (c *Client) Use(hooks ...Hook) {
// In order to add interceptors to a specific client, call: `client.Node.Intercept(...)`.
func (c *Client) Intercept(interceptors ...Interceptor) {
for _, n := range []interface{ Intercept(...Interceptor) }{
- c.APIKey, c.Account, c.AccountGroup, c.Group, c.PromoCode, c.PromoCodeUsage,
- c.Proxy, c.RedeemCode, c.Setting, c.UsageCleanupTask, c.UsageLog, c.User,
- c.UserAllowedGroup, c.UserAttributeDefinition, c.UserAttributeValue,
- c.UserSubscription,
+ c.APIKey, c.Account, c.AccountGroup, c.Announcement, c.AnnouncementRead,
+ c.Group, c.PromoCode, c.PromoCodeUsage, c.Proxy, c.RedeemCode, c.Setting,
+ c.UsageCleanupTask, c.UsageLog, c.User, c.UserAllowedGroup,
+ c.UserAttributeDefinition, c.UserAttributeValue, c.UserSubscription,
} {
n.Intercept(interceptors...)
}
@@ -302,6 +314,10 @@ func (c *Client) Mutate(ctx context.Context, m Mutation) (Value, error) {
return c.Account.mutate(ctx, m)
case *AccountGroupMutation:
return c.AccountGroup.mutate(ctx, m)
+ case *AnnouncementMutation:
+ return c.Announcement.mutate(ctx, m)
+ case *AnnouncementReadMutation:
+ return c.AnnouncementRead.mutate(ctx, m)
case *GroupMutation:
return c.Group.mutate(ctx, m)
case *PromoCodeMutation:
@@ -831,6 +847,320 @@ func (c *AccountGroupClient) mutate(ctx context.Context, m *AccountGroupMutation
}
}
+// AnnouncementClient is a client for the Announcement schema.
+type AnnouncementClient struct {
+ config
+}
+
+// NewAnnouncementClient returns a client for the Announcement from the given config.
+func NewAnnouncementClient(c config) *AnnouncementClient {
+ return &AnnouncementClient{config: c}
+}
+
+// Use adds a list of mutation hooks to the hooks stack.
+// A call to `Use(f, g, h)` equals to `announcement.Hooks(f(g(h())))`.
+func (c *AnnouncementClient) Use(hooks ...Hook) {
+ c.hooks.Announcement = append(c.hooks.Announcement, hooks...)
+}
+
+// Intercept adds a list of query interceptors to the interceptors stack.
+// A call to `Intercept(f, g, h)` equals to `announcement.Intercept(f(g(h())))`.
+func (c *AnnouncementClient) Intercept(interceptors ...Interceptor) {
+ c.inters.Announcement = append(c.inters.Announcement, interceptors...)
+}
+
+// Create returns a builder for creating a Announcement entity.
+func (c *AnnouncementClient) Create() *AnnouncementCreate {
+ mutation := newAnnouncementMutation(c.config, OpCreate)
+ return &AnnouncementCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
+}
+
+// CreateBulk returns a builder for creating a bulk of Announcement entities.
+func (c *AnnouncementClient) CreateBulk(builders ...*AnnouncementCreate) *AnnouncementCreateBulk {
+ return &AnnouncementCreateBulk{config: c.config, builders: builders}
+}
+
+// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
+// a builder and applies setFunc on it.
+func (c *AnnouncementClient) MapCreateBulk(slice any, setFunc func(*AnnouncementCreate, int)) *AnnouncementCreateBulk {
+ rv := reflect.ValueOf(slice)
+ if rv.Kind() != reflect.Slice {
+ return &AnnouncementCreateBulk{err: fmt.Errorf("calling to AnnouncementClient.MapCreateBulk with wrong type %T, need slice", slice)}
+ }
+ builders := make([]*AnnouncementCreate, rv.Len())
+ for i := 0; i < rv.Len(); i++ {
+ builders[i] = c.Create()
+ setFunc(builders[i], i)
+ }
+ return &AnnouncementCreateBulk{config: c.config, builders: builders}
+}
+
+// Update returns an update builder for Announcement.
+func (c *AnnouncementClient) Update() *AnnouncementUpdate {
+ mutation := newAnnouncementMutation(c.config, OpUpdate)
+ return &AnnouncementUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
+}
+
+// UpdateOne returns an update builder for the given entity.
+func (c *AnnouncementClient) UpdateOne(_m *Announcement) *AnnouncementUpdateOne {
+ mutation := newAnnouncementMutation(c.config, OpUpdateOne, withAnnouncement(_m))
+ return &AnnouncementUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
+}
+
+// UpdateOneID returns an update builder for the given id.
+func (c *AnnouncementClient) UpdateOneID(id int64) *AnnouncementUpdateOne {
+ mutation := newAnnouncementMutation(c.config, OpUpdateOne, withAnnouncementID(id))
+ return &AnnouncementUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
+}
+
+// Delete returns a delete builder for Announcement.
+func (c *AnnouncementClient) Delete() *AnnouncementDelete {
+ mutation := newAnnouncementMutation(c.config, OpDelete)
+ return &AnnouncementDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
+}
+
+// DeleteOne returns a builder for deleting the given entity.
+func (c *AnnouncementClient) DeleteOne(_m *Announcement) *AnnouncementDeleteOne {
+ return c.DeleteOneID(_m.ID)
+}
+
+// DeleteOneID returns a builder for deleting the given entity by its id.
+func (c *AnnouncementClient) DeleteOneID(id int64) *AnnouncementDeleteOne {
+ builder := c.Delete().Where(announcement.ID(id))
+ builder.mutation.id = &id
+ builder.mutation.op = OpDeleteOne
+ return &AnnouncementDeleteOne{builder}
+}
+
+// Query returns a query builder for Announcement.
+func (c *AnnouncementClient) Query() *AnnouncementQuery {
+ return &AnnouncementQuery{
+ config: c.config,
+ ctx: &QueryContext{Type: TypeAnnouncement},
+ inters: c.Interceptors(),
+ }
+}
+
+// Get returns a Announcement entity by its id.
+func (c *AnnouncementClient) Get(ctx context.Context, id int64) (*Announcement, error) {
+ return c.Query().Where(announcement.ID(id)).Only(ctx)
+}
+
+// GetX is like Get, but panics if an error occurs.
+func (c *AnnouncementClient) GetX(ctx context.Context, id int64) *Announcement {
+ obj, err := c.Get(ctx, id)
+ if err != nil {
+ panic(err)
+ }
+ return obj
+}
+
+// QueryReads queries the reads edge of a Announcement.
+func (c *AnnouncementClient) QueryReads(_m *Announcement) *AnnouncementReadQuery {
+ query := (&AnnouncementReadClient{config: c.config}).Query()
+ query.path = func(context.Context) (fromV *sql.Selector, _ error) {
+ id := _m.ID
+ step := sqlgraph.NewStep(
+ sqlgraph.From(announcement.Table, announcement.FieldID, id),
+ sqlgraph.To(announcementread.Table, announcementread.FieldID),
+ sqlgraph.Edge(sqlgraph.O2M, false, announcement.ReadsTable, announcement.ReadsColumn),
+ )
+ fromV = sqlgraph.Neighbors(_m.driver.Dialect(), step)
+ return fromV, nil
+ }
+ return query
+}
+
+// Hooks returns the client hooks.
+func (c *AnnouncementClient) Hooks() []Hook {
+ return c.hooks.Announcement
+}
+
+// Interceptors returns the client interceptors.
+func (c *AnnouncementClient) Interceptors() []Interceptor {
+ return c.inters.Announcement
+}
+
+func (c *AnnouncementClient) mutate(ctx context.Context, m *AnnouncementMutation) (Value, error) {
+ switch m.Op() {
+ case OpCreate:
+ return (&AnnouncementCreate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
+ case OpUpdate:
+ return (&AnnouncementUpdate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
+ case OpUpdateOne:
+ return (&AnnouncementUpdateOne{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
+ case OpDelete, OpDeleteOne:
+ return (&AnnouncementDelete{config: c.config, hooks: c.Hooks(), mutation: m}).Exec(ctx)
+ default:
+ return nil, fmt.Errorf("ent: unknown Announcement mutation op: %q", m.Op())
+ }
+}
+
+// AnnouncementReadClient is a client for the AnnouncementRead schema.
+type AnnouncementReadClient struct {
+ config
+}
+
+// NewAnnouncementReadClient returns a client for the AnnouncementRead from the given config.
+func NewAnnouncementReadClient(c config) *AnnouncementReadClient {
+ return &AnnouncementReadClient{config: c}
+}
+
+// Use adds a list of mutation hooks to the hooks stack.
+// A call to `Use(f, g, h)` equals to `announcementread.Hooks(f(g(h())))`.
+func (c *AnnouncementReadClient) Use(hooks ...Hook) {
+ c.hooks.AnnouncementRead = append(c.hooks.AnnouncementRead, hooks...)
+}
+
+// Intercept adds a list of query interceptors to the interceptors stack.
+// A call to `Intercept(f, g, h)` equals to `announcementread.Intercept(f(g(h())))`.
+func (c *AnnouncementReadClient) Intercept(interceptors ...Interceptor) {
+ c.inters.AnnouncementRead = append(c.inters.AnnouncementRead, interceptors...)
+}
+
+// Create returns a builder for creating a AnnouncementRead entity.
+func (c *AnnouncementReadClient) Create() *AnnouncementReadCreate {
+ mutation := newAnnouncementReadMutation(c.config, OpCreate)
+ return &AnnouncementReadCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
+}
+
+// CreateBulk returns a builder for creating a bulk of AnnouncementRead entities.
+func (c *AnnouncementReadClient) CreateBulk(builders ...*AnnouncementReadCreate) *AnnouncementReadCreateBulk {
+ return &AnnouncementReadCreateBulk{config: c.config, builders: builders}
+}
+
+// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
+// a builder and applies setFunc on it.
+func (c *AnnouncementReadClient) MapCreateBulk(slice any, setFunc func(*AnnouncementReadCreate, int)) *AnnouncementReadCreateBulk {
+ rv := reflect.ValueOf(slice)
+ if rv.Kind() != reflect.Slice {
+ return &AnnouncementReadCreateBulk{err: fmt.Errorf("calling to AnnouncementReadClient.MapCreateBulk with wrong type %T, need slice", slice)}
+ }
+ builders := make([]*AnnouncementReadCreate, rv.Len())
+ for i := 0; i < rv.Len(); i++ {
+ builders[i] = c.Create()
+ setFunc(builders[i], i)
+ }
+ return &AnnouncementReadCreateBulk{config: c.config, builders: builders}
+}
+
+// Update returns an update builder for AnnouncementRead.
+func (c *AnnouncementReadClient) Update() *AnnouncementReadUpdate {
+ mutation := newAnnouncementReadMutation(c.config, OpUpdate)
+ return &AnnouncementReadUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
+}
+
+// UpdateOne returns an update builder for the given entity.
+func (c *AnnouncementReadClient) UpdateOne(_m *AnnouncementRead) *AnnouncementReadUpdateOne {
+ mutation := newAnnouncementReadMutation(c.config, OpUpdateOne, withAnnouncementRead(_m))
+ return &AnnouncementReadUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
+}
+
+// UpdateOneID returns an update builder for the given id.
+func (c *AnnouncementReadClient) UpdateOneID(id int64) *AnnouncementReadUpdateOne {
+ mutation := newAnnouncementReadMutation(c.config, OpUpdateOne, withAnnouncementReadID(id))
+ return &AnnouncementReadUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
+}
+
+// Delete returns a delete builder for AnnouncementRead.
+func (c *AnnouncementReadClient) Delete() *AnnouncementReadDelete {
+ mutation := newAnnouncementReadMutation(c.config, OpDelete)
+ return &AnnouncementReadDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
+}
+
+// DeleteOne returns a builder for deleting the given entity.
+func (c *AnnouncementReadClient) DeleteOne(_m *AnnouncementRead) *AnnouncementReadDeleteOne {
+ return c.DeleteOneID(_m.ID)
+}
+
+// DeleteOneID returns a builder for deleting the given entity by its id.
+func (c *AnnouncementReadClient) DeleteOneID(id int64) *AnnouncementReadDeleteOne {
+ builder := c.Delete().Where(announcementread.ID(id))
+ builder.mutation.id = &id
+ builder.mutation.op = OpDeleteOne
+ return &AnnouncementReadDeleteOne{builder}
+}
+
+// Query returns a query builder for AnnouncementRead.
+func (c *AnnouncementReadClient) Query() *AnnouncementReadQuery {
+ return &AnnouncementReadQuery{
+ config: c.config,
+ ctx: &QueryContext{Type: TypeAnnouncementRead},
+ inters: c.Interceptors(),
+ }
+}
+
+// Get returns a AnnouncementRead entity by its id.
+func (c *AnnouncementReadClient) Get(ctx context.Context, id int64) (*AnnouncementRead, error) {
+ return c.Query().Where(announcementread.ID(id)).Only(ctx)
+}
+
+// GetX is like Get, but panics if an error occurs.
+func (c *AnnouncementReadClient) GetX(ctx context.Context, id int64) *AnnouncementRead {
+ obj, err := c.Get(ctx, id)
+ if err != nil {
+ panic(err)
+ }
+ return obj
+}
+
+// QueryAnnouncement queries the announcement edge of a AnnouncementRead.
+func (c *AnnouncementReadClient) QueryAnnouncement(_m *AnnouncementRead) *AnnouncementQuery {
+ query := (&AnnouncementClient{config: c.config}).Query()
+ query.path = func(context.Context) (fromV *sql.Selector, _ error) {
+ id := _m.ID
+ step := sqlgraph.NewStep(
+ sqlgraph.From(announcementread.Table, announcementread.FieldID, id),
+ sqlgraph.To(announcement.Table, announcement.FieldID),
+ sqlgraph.Edge(sqlgraph.M2O, true, announcementread.AnnouncementTable, announcementread.AnnouncementColumn),
+ )
+ fromV = sqlgraph.Neighbors(_m.driver.Dialect(), step)
+ return fromV, nil
+ }
+ return query
+}
+
+// QueryUser queries the user edge of a AnnouncementRead.
+func (c *AnnouncementReadClient) QueryUser(_m *AnnouncementRead) *UserQuery {
+ query := (&UserClient{config: c.config}).Query()
+ query.path = func(context.Context) (fromV *sql.Selector, _ error) {
+ id := _m.ID
+ step := sqlgraph.NewStep(
+ sqlgraph.From(announcementread.Table, announcementread.FieldID, id),
+ sqlgraph.To(user.Table, user.FieldID),
+ sqlgraph.Edge(sqlgraph.M2O, true, announcementread.UserTable, announcementread.UserColumn),
+ )
+ fromV = sqlgraph.Neighbors(_m.driver.Dialect(), step)
+ return fromV, nil
+ }
+ return query
+}
+
+// Hooks returns the client hooks.
+func (c *AnnouncementReadClient) Hooks() []Hook {
+ return c.hooks.AnnouncementRead
+}
+
+// Interceptors returns the client interceptors.
+func (c *AnnouncementReadClient) Interceptors() []Interceptor {
+ return c.inters.AnnouncementRead
+}
+
+func (c *AnnouncementReadClient) mutate(ctx context.Context, m *AnnouncementReadMutation) (Value, error) {
+ switch m.Op() {
+ case OpCreate:
+ return (&AnnouncementReadCreate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
+ case OpUpdate:
+ return (&AnnouncementReadUpdate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
+ case OpUpdateOne:
+ return (&AnnouncementReadUpdateOne{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
+ case OpDelete, OpDeleteOne:
+ return (&AnnouncementReadDelete{config: c.config, hooks: c.Hooks(), mutation: m}).Exec(ctx)
+ default:
+ return nil, fmt.Errorf("ent: unknown AnnouncementRead mutation op: %q", m.Op())
+ }
+}
+
// GroupClient is a client for the Group schema.
type GroupClient struct {
config
@@ -2375,6 +2705,22 @@ func (c *UserClient) QueryAssignedSubscriptions(_m *User) *UserSubscriptionQuery
return query
}
+// QueryAnnouncementReads queries the announcement_reads edge of a User.
+func (c *UserClient) QueryAnnouncementReads(_m *User) *AnnouncementReadQuery {
+ query := (&AnnouncementReadClient{config: c.config}).Query()
+ query.path = func(context.Context) (fromV *sql.Selector, _ error) {
+ id := _m.ID
+ step := sqlgraph.NewStep(
+ sqlgraph.From(user.Table, user.FieldID, id),
+ sqlgraph.To(announcementread.Table, announcementread.FieldID),
+ sqlgraph.Edge(sqlgraph.O2M, false, user.AnnouncementReadsTable, user.AnnouncementReadsColumn),
+ )
+ fromV = sqlgraph.Neighbors(_m.driver.Dialect(), step)
+ return fromV, nil
+ }
+ return query
+}
+
// QueryAllowedGroups queries the allowed_groups edge of a User.
func (c *UserClient) QueryAllowedGroups(_m *User) *GroupQuery {
query := (&GroupClient{config: c.config}).Query()
@@ -3116,14 +3462,16 @@ func (c *UserSubscriptionClient) mutate(ctx context.Context, m *UserSubscription
// hooks and interceptors per client, for fast access.
type (
hooks struct {
- APIKey, Account, AccountGroup, Group, PromoCode, PromoCodeUsage, Proxy,
- RedeemCode, Setting, UsageCleanupTask, UsageLog, User, UserAllowedGroup,
- UserAttributeDefinition, UserAttributeValue, UserSubscription []ent.Hook
+ APIKey, Account, AccountGroup, Announcement, AnnouncementRead, Group, PromoCode,
+ PromoCodeUsage, Proxy, RedeemCode, Setting, UsageCleanupTask, UsageLog, User,
+ UserAllowedGroup, UserAttributeDefinition, UserAttributeValue,
+ UserSubscription []ent.Hook
}
inters struct {
- APIKey, Account, AccountGroup, Group, PromoCode, PromoCodeUsage, Proxy,
- RedeemCode, Setting, UsageCleanupTask, UsageLog, User, UserAllowedGroup,
- UserAttributeDefinition, UserAttributeValue, UserSubscription []ent.Interceptor
+ APIKey, Account, AccountGroup, Announcement, AnnouncementRead, Group, PromoCode,
+ PromoCodeUsage, Proxy, RedeemCode, Setting, UsageCleanupTask, UsageLog, User,
+ UserAllowedGroup, UserAttributeDefinition, UserAttributeValue,
+ UserSubscription []ent.Interceptor
}
)
diff --git a/backend/ent/ent.go b/backend/ent/ent.go
index 4bcc2642..05e30ba7 100644
--- a/backend/ent/ent.go
+++ b/backend/ent/ent.go
@@ -14,6 +14,8 @@ import (
"entgo.io/ent/dialect/sql/sqlgraph"
"github.com/Wei-Shaw/sub2api/ent/account"
"github.com/Wei-Shaw/sub2api/ent/accountgroup"
+ "github.com/Wei-Shaw/sub2api/ent/announcement"
+ "github.com/Wei-Shaw/sub2api/ent/announcementread"
"github.com/Wei-Shaw/sub2api/ent/apikey"
"github.com/Wei-Shaw/sub2api/ent/group"
"github.com/Wei-Shaw/sub2api/ent/promocode"
@@ -91,6 +93,8 @@ func checkColumn(t, c string) error {
apikey.Table: apikey.ValidColumn,
account.Table: account.ValidColumn,
accountgroup.Table: accountgroup.ValidColumn,
+ announcement.Table: announcement.ValidColumn,
+ announcementread.Table: announcementread.ValidColumn,
group.Table: group.ValidColumn,
promocode.Table: promocode.ValidColumn,
promocodeusage.Table: promocodeusage.ValidColumn,
diff --git a/backend/ent/hook/hook.go b/backend/ent/hook/hook.go
index edd84f5e..1e653c77 100644
--- a/backend/ent/hook/hook.go
+++ b/backend/ent/hook/hook.go
@@ -45,6 +45,30 @@ func (f AccountGroupFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value
return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.AccountGroupMutation", m)
}
+// The AnnouncementFunc type is an adapter to allow the use of ordinary
+// function as Announcement mutator.
+type AnnouncementFunc func(context.Context, *ent.AnnouncementMutation) (ent.Value, error)
+
+// Mutate calls f(ctx, m).
+func (f AnnouncementFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) {
+ if mv, ok := m.(*ent.AnnouncementMutation); ok {
+ return f(ctx, mv)
+ }
+ return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.AnnouncementMutation", m)
+}
+
+// The AnnouncementReadFunc type is an adapter to allow the use of ordinary
+// function as AnnouncementRead mutator.
+type AnnouncementReadFunc func(context.Context, *ent.AnnouncementReadMutation) (ent.Value, error)
+
+// Mutate calls f(ctx, m).
+func (f AnnouncementReadFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) {
+ if mv, ok := m.(*ent.AnnouncementReadMutation); ok {
+ return f(ctx, mv)
+ }
+ return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.AnnouncementReadMutation", m)
+}
+
// The GroupFunc type is an adapter to allow the use of ordinary
// function as Group mutator.
type GroupFunc func(context.Context, *ent.GroupMutation) (ent.Value, error)
diff --git a/backend/ent/intercept/intercept.go b/backend/ent/intercept/intercept.go
index f18c0624..a37be48f 100644
--- a/backend/ent/intercept/intercept.go
+++ b/backend/ent/intercept/intercept.go
@@ -10,6 +10,8 @@ import (
"github.com/Wei-Shaw/sub2api/ent"
"github.com/Wei-Shaw/sub2api/ent/account"
"github.com/Wei-Shaw/sub2api/ent/accountgroup"
+ "github.com/Wei-Shaw/sub2api/ent/announcement"
+ "github.com/Wei-Shaw/sub2api/ent/announcementread"
"github.com/Wei-Shaw/sub2api/ent/apikey"
"github.com/Wei-Shaw/sub2api/ent/group"
"github.com/Wei-Shaw/sub2api/ent/predicate"
@@ -164,6 +166,60 @@ func (f TraverseAccountGroup) Traverse(ctx context.Context, q ent.Query) error {
return fmt.Errorf("unexpected query type %T. expect *ent.AccountGroupQuery", q)
}
+// The AnnouncementFunc type is an adapter to allow the use of ordinary function as a Querier.
+type AnnouncementFunc func(context.Context, *ent.AnnouncementQuery) (ent.Value, error)
+
+// Query calls f(ctx, q).
+func (f AnnouncementFunc) Query(ctx context.Context, q ent.Query) (ent.Value, error) {
+ if q, ok := q.(*ent.AnnouncementQuery); ok {
+ return f(ctx, q)
+ }
+ return nil, fmt.Errorf("unexpected query type %T. expect *ent.AnnouncementQuery", q)
+}
+
+// The TraverseAnnouncement type is an adapter to allow the use of ordinary function as Traverser.
+type TraverseAnnouncement func(context.Context, *ent.AnnouncementQuery) error
+
+// Intercept is a dummy implementation of Intercept that returns the next Querier in the pipeline.
+func (f TraverseAnnouncement) Intercept(next ent.Querier) ent.Querier {
+ return next
+}
+
+// Traverse calls f(ctx, q).
+func (f TraverseAnnouncement) Traverse(ctx context.Context, q ent.Query) error {
+ if q, ok := q.(*ent.AnnouncementQuery); ok {
+ return f(ctx, q)
+ }
+ return fmt.Errorf("unexpected query type %T. expect *ent.AnnouncementQuery", q)
+}
+
+// The AnnouncementReadFunc type is an adapter to allow the use of ordinary function as a Querier.
+type AnnouncementReadFunc func(context.Context, *ent.AnnouncementReadQuery) (ent.Value, error)
+
+// Query calls f(ctx, q).
+func (f AnnouncementReadFunc) Query(ctx context.Context, q ent.Query) (ent.Value, error) {
+ if q, ok := q.(*ent.AnnouncementReadQuery); ok {
+ return f(ctx, q)
+ }
+ return nil, fmt.Errorf("unexpected query type %T. expect *ent.AnnouncementReadQuery", q)
+}
+
+// The TraverseAnnouncementRead type is an adapter to allow the use of ordinary function as Traverser.
+type TraverseAnnouncementRead func(context.Context, *ent.AnnouncementReadQuery) error
+
+// Intercept is a dummy implementation of Intercept that returns the next Querier in the pipeline.
+func (f TraverseAnnouncementRead) Intercept(next ent.Querier) ent.Querier {
+ return next
+}
+
+// Traverse calls f(ctx, q).
+func (f TraverseAnnouncementRead) Traverse(ctx context.Context, q ent.Query) error {
+ if q, ok := q.(*ent.AnnouncementReadQuery); ok {
+ return f(ctx, q)
+ }
+ return fmt.Errorf("unexpected query type %T. expect *ent.AnnouncementReadQuery", q)
+}
+
// The GroupFunc type is an adapter to allow the use of ordinary function as a Querier.
type GroupFunc func(context.Context, *ent.GroupQuery) (ent.Value, error)
@@ -524,6 +580,10 @@ func NewQuery(q ent.Query) (Query, error) {
return &query[*ent.AccountQuery, predicate.Account, account.OrderOption]{typ: ent.TypeAccount, tq: q}, nil
case *ent.AccountGroupQuery:
return &query[*ent.AccountGroupQuery, predicate.AccountGroup, accountgroup.OrderOption]{typ: ent.TypeAccountGroup, tq: q}, nil
+ case *ent.AnnouncementQuery:
+ return &query[*ent.AnnouncementQuery, predicate.Announcement, announcement.OrderOption]{typ: ent.TypeAnnouncement, tq: q}, nil
+ case *ent.AnnouncementReadQuery:
+ return &query[*ent.AnnouncementReadQuery, predicate.AnnouncementRead, announcementread.OrderOption]{typ: ent.TypeAnnouncementRead, tq: q}, nil
case *ent.GroupQuery:
return &query[*ent.GroupQuery, predicate.Group, group.OrderOption]{typ: ent.TypeGroup, tq: q}, nil
case *ent.PromoCodeQuery:
diff --git a/backend/ent/migrate/schema.go b/backend/ent/migrate/schema.go
index d2a39331..e2ed7340 100644
--- a/backend/ent/migrate/schema.go
+++ b/backend/ent/migrate/schema.go
@@ -204,6 +204,98 @@ var (
},
},
}
+ // AnnouncementsColumns holds the columns for the "announcements" table.
+ AnnouncementsColumns = []*schema.Column{
+ {Name: "id", Type: field.TypeInt64, Increment: true},
+ {Name: "title", Type: field.TypeString, Size: 200},
+ {Name: "content", Type: field.TypeString, SchemaType: map[string]string{"postgres": "text"}},
+ {Name: "status", Type: field.TypeString, Size: 20, Default: "draft"},
+ {Name: "targeting", Type: field.TypeJSON, Nullable: true, SchemaType: map[string]string{"postgres": "jsonb"}},
+ {Name: "starts_at", Type: field.TypeTime, Nullable: true, SchemaType: map[string]string{"postgres": "timestamptz"}},
+ {Name: "ends_at", Type: field.TypeTime, Nullable: true, SchemaType: map[string]string{"postgres": "timestamptz"}},
+ {Name: "created_by", Type: field.TypeInt64, Nullable: true},
+ {Name: "updated_by", Type: field.TypeInt64, Nullable: true},
+ {Name: "created_at", Type: field.TypeTime, SchemaType: map[string]string{"postgres": "timestamptz"}},
+ {Name: "updated_at", Type: field.TypeTime, SchemaType: map[string]string{"postgres": "timestamptz"}},
+ }
+ // AnnouncementsTable holds the schema information for the "announcements" table.
+ AnnouncementsTable = &schema.Table{
+ Name: "announcements",
+ Columns: AnnouncementsColumns,
+ PrimaryKey: []*schema.Column{AnnouncementsColumns[0]},
+ Indexes: []*schema.Index{
+ {
+ Name: "announcement_status",
+ Unique: false,
+ Columns: []*schema.Column{AnnouncementsColumns[3]},
+ },
+ {
+ Name: "announcement_created_at",
+ Unique: false,
+ Columns: []*schema.Column{AnnouncementsColumns[9]},
+ },
+ {
+ Name: "announcement_starts_at",
+ Unique: false,
+ Columns: []*schema.Column{AnnouncementsColumns[5]},
+ },
+ {
+ Name: "announcement_ends_at",
+ Unique: false,
+ Columns: []*schema.Column{AnnouncementsColumns[6]},
+ },
+ },
+ }
+ // AnnouncementReadsColumns holds the columns for the "announcement_reads" table.
+ AnnouncementReadsColumns = []*schema.Column{
+ {Name: "id", Type: field.TypeInt64, Increment: true},
+ {Name: "read_at", Type: field.TypeTime, SchemaType: map[string]string{"postgres": "timestamptz"}},
+ {Name: "created_at", Type: field.TypeTime, SchemaType: map[string]string{"postgres": "timestamptz"}},
+ {Name: "announcement_id", Type: field.TypeInt64},
+ {Name: "user_id", Type: field.TypeInt64},
+ }
+ // AnnouncementReadsTable holds the schema information for the "announcement_reads" table.
+ AnnouncementReadsTable = &schema.Table{
+ Name: "announcement_reads",
+ Columns: AnnouncementReadsColumns,
+ PrimaryKey: []*schema.Column{AnnouncementReadsColumns[0]},
+ ForeignKeys: []*schema.ForeignKey{
+ {
+ Symbol: "announcement_reads_announcements_reads",
+ Columns: []*schema.Column{AnnouncementReadsColumns[3]},
+ RefColumns: []*schema.Column{AnnouncementsColumns[0]},
+ OnDelete: schema.NoAction,
+ },
+ {
+ Symbol: "announcement_reads_users_announcement_reads",
+ Columns: []*schema.Column{AnnouncementReadsColumns[4]},
+ RefColumns: []*schema.Column{UsersColumns[0]},
+ OnDelete: schema.NoAction,
+ },
+ },
+ Indexes: []*schema.Index{
+ {
+ Name: "announcementread_announcement_id",
+ Unique: false,
+ Columns: []*schema.Column{AnnouncementReadsColumns[3]},
+ },
+ {
+ Name: "announcementread_user_id",
+ Unique: false,
+ Columns: []*schema.Column{AnnouncementReadsColumns[4]},
+ },
+ {
+ Name: "announcementread_read_at",
+ Unique: false,
+ Columns: []*schema.Column{AnnouncementReadsColumns[1]},
+ },
+ {
+ Name: "announcementread_announcement_id_user_id",
+ Unique: true,
+ Columns: []*schema.Column{AnnouncementReadsColumns[3], AnnouncementReadsColumns[4]},
+ },
+ },
+ }
// GroupsColumns holds the columns for the "groups" table.
GroupsColumns = []*schema.Column{
{Name: "id", Type: field.TypeInt64, Increment: true},
@@ -840,6 +932,8 @@ var (
APIKeysTable,
AccountsTable,
AccountGroupsTable,
+ AnnouncementsTable,
+ AnnouncementReadsTable,
GroupsTable,
PromoCodesTable,
PromoCodeUsagesTable,
@@ -871,6 +965,14 @@ func init() {
AccountGroupsTable.Annotation = &entsql.Annotation{
Table: "account_groups",
}
+ AnnouncementsTable.Annotation = &entsql.Annotation{
+ Table: "announcements",
+ }
+ AnnouncementReadsTable.ForeignKeys[0].RefTable = AnnouncementsTable
+ AnnouncementReadsTable.ForeignKeys[1].RefTable = UsersTable
+ AnnouncementReadsTable.Annotation = &entsql.Annotation{
+ Table: "announcement_reads",
+ }
GroupsTable.Annotation = &entsql.Annotation{
Table: "groups",
}
diff --git a/backend/ent/mutation.go b/backend/ent/mutation.go
index 7f3071c2..38e0c7e5 100644
--- a/backend/ent/mutation.go
+++ b/backend/ent/mutation.go
@@ -14,6 +14,8 @@ import (
"entgo.io/ent/dialect/sql"
"github.com/Wei-Shaw/sub2api/ent/account"
"github.com/Wei-Shaw/sub2api/ent/accountgroup"
+ "github.com/Wei-Shaw/sub2api/ent/announcement"
+ "github.com/Wei-Shaw/sub2api/ent/announcementread"
"github.com/Wei-Shaw/sub2api/ent/apikey"
"github.com/Wei-Shaw/sub2api/ent/group"
"github.com/Wei-Shaw/sub2api/ent/predicate"
@@ -29,6 +31,7 @@ import (
"github.com/Wei-Shaw/sub2api/ent/userattributedefinition"
"github.com/Wei-Shaw/sub2api/ent/userattributevalue"
"github.com/Wei-Shaw/sub2api/ent/usersubscription"
+ "github.com/Wei-Shaw/sub2api/internal/domain"
)
const (
@@ -43,6 +46,8 @@ const (
TypeAPIKey = "APIKey"
TypeAccount = "Account"
TypeAccountGroup = "AccountGroup"
+ TypeAnnouncement = "Announcement"
+ TypeAnnouncementRead = "AnnouncementRead"
TypeGroup = "Group"
TypePromoCode = "PromoCode"
TypePromoCodeUsage = "PromoCodeUsage"
@@ -3833,6 +3838,1671 @@ func (m *AccountGroupMutation) ResetEdge(name string) error {
return fmt.Errorf("unknown AccountGroup edge %s", name)
}
+// AnnouncementMutation represents an operation that mutates the Announcement nodes in the graph.
+type AnnouncementMutation struct {
+ config
+ op Op
+ typ string
+ id *int64
+ title *string
+ content *string
+ status *string
+ targeting *domain.AnnouncementTargeting
+ starts_at *time.Time
+ ends_at *time.Time
+ created_by *int64
+ addcreated_by *int64
+ updated_by *int64
+ addupdated_by *int64
+ created_at *time.Time
+ updated_at *time.Time
+ clearedFields map[string]struct{}
+ reads map[int64]struct{}
+ removedreads map[int64]struct{}
+ clearedreads bool
+ done bool
+ oldValue func(context.Context) (*Announcement, error)
+ predicates []predicate.Announcement
+}
+
+var _ ent.Mutation = (*AnnouncementMutation)(nil)
+
+// announcementOption allows management of the mutation configuration using functional options.
+type announcementOption func(*AnnouncementMutation)
+
+// newAnnouncementMutation creates new mutation for the Announcement entity.
+func newAnnouncementMutation(c config, op Op, opts ...announcementOption) *AnnouncementMutation {
+ m := &AnnouncementMutation{
+ config: c,
+ op: op,
+ typ: TypeAnnouncement,
+ clearedFields: make(map[string]struct{}),
+ }
+ for _, opt := range opts {
+ opt(m)
+ }
+ return m
+}
+
+// withAnnouncementID sets the ID field of the mutation.
+func withAnnouncementID(id int64) announcementOption {
+ return func(m *AnnouncementMutation) {
+ var (
+ err error
+ once sync.Once
+ value *Announcement
+ )
+ m.oldValue = func(ctx context.Context) (*Announcement, error) {
+ once.Do(func() {
+ if m.done {
+ err = errors.New("querying old values post mutation is not allowed")
+ } else {
+ value, err = m.Client().Announcement.Get(ctx, id)
+ }
+ })
+ return value, err
+ }
+ m.id = &id
+ }
+}
+
+// withAnnouncement sets the old Announcement of the mutation.
+func withAnnouncement(node *Announcement) announcementOption {
+ return func(m *AnnouncementMutation) {
+ m.oldValue = func(context.Context) (*Announcement, error) {
+ return node, nil
+ }
+ m.id = &node.ID
+ }
+}
+
+// Client returns a new `ent.Client` from the mutation. If the mutation was
+// executed in a transaction (ent.Tx), a transactional client is returned.
+func (m AnnouncementMutation) Client() *Client {
+ client := &Client{config: m.config}
+ client.init()
+ return client
+}
+
+// Tx returns an `ent.Tx` for mutations that were executed in transactions;
+// it returns an error otherwise.
+func (m AnnouncementMutation) Tx() (*Tx, error) {
+ if _, ok := m.driver.(*txDriver); !ok {
+ return nil, errors.New("ent: mutation is not running in a transaction")
+ }
+ tx := &Tx{config: m.config}
+ tx.init()
+ return tx, nil
+}
+
+// ID returns the ID value in the mutation. Note that the ID is only available
+// if it was provided to the builder or after it was returned from the database.
+func (m *AnnouncementMutation) ID() (id int64, exists bool) {
+ if m.id == nil {
+ return
+ }
+ return *m.id, true
+}
+
+// IDs queries the database and returns the entity ids that match the mutation's predicate.
+// That means, if the mutation is applied within a transaction with an isolation level such
+// as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated
+// or updated by the mutation.
+func (m *AnnouncementMutation) IDs(ctx context.Context) ([]int64, error) {
+ switch {
+ case m.op.Is(OpUpdateOne | OpDeleteOne):
+ id, exists := m.ID()
+ if exists {
+ return []int64{id}, nil
+ }
+ fallthrough
+ case m.op.Is(OpUpdate | OpDelete):
+ return m.Client().Announcement.Query().Where(m.predicates...).IDs(ctx)
+ default:
+ return nil, fmt.Errorf("IDs is not allowed on %s operations", m.op)
+ }
+}
+
+// SetTitle sets the "title" field.
+func (m *AnnouncementMutation) SetTitle(s string) {
+ m.title = &s
+}
+
+// Title returns the value of the "title" field in the mutation.
+func (m *AnnouncementMutation) Title() (r string, exists bool) {
+ v := m.title
+ if v == nil {
+ return
+ }
+ return *v, true
+}
+
+// OldTitle returns the old "title" field's value of the Announcement entity.
+// If the Announcement object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *AnnouncementMutation) OldTitle(ctx context.Context) (v string, err error) {
+ if !m.op.Is(OpUpdateOne) {
+ return v, errors.New("OldTitle is only allowed on UpdateOne operations")
+ }
+ if m.id == nil || m.oldValue == nil {
+ return v, errors.New("OldTitle requires an ID field in the mutation")
+ }
+ oldValue, err := m.oldValue(ctx)
+ if err != nil {
+ return v, fmt.Errorf("querying old value for OldTitle: %w", err)
+ }
+ return oldValue.Title, nil
+}
+
+// ResetTitle resets all changes to the "title" field.
+func (m *AnnouncementMutation) ResetTitle() {
+ m.title = nil
+}
+
+// SetContent sets the "content" field.
+func (m *AnnouncementMutation) SetContent(s string) {
+ m.content = &s
+}
+
+// Content returns the value of the "content" field in the mutation.
+func (m *AnnouncementMutation) Content() (r string, exists bool) {
+ v := m.content
+ if v == nil {
+ return
+ }
+ return *v, true
+}
+
+// OldContent returns the old "content" field's value of the Announcement entity.
+// If the Announcement object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *AnnouncementMutation) OldContent(ctx context.Context) (v string, err error) {
+ if !m.op.Is(OpUpdateOne) {
+ return v, errors.New("OldContent is only allowed on UpdateOne operations")
+ }
+ if m.id == nil || m.oldValue == nil {
+ return v, errors.New("OldContent requires an ID field in the mutation")
+ }
+ oldValue, err := m.oldValue(ctx)
+ if err != nil {
+ return v, fmt.Errorf("querying old value for OldContent: %w", err)
+ }
+ return oldValue.Content, nil
+}
+
+// ResetContent resets all changes to the "content" field.
+func (m *AnnouncementMutation) ResetContent() {
+ m.content = nil
+}
+
+// SetStatus sets the "status" field.
+func (m *AnnouncementMutation) SetStatus(s string) {
+ m.status = &s
+}
+
+// Status returns the value of the "status" field in the mutation.
+func (m *AnnouncementMutation) Status() (r string, exists bool) {
+ v := m.status
+ if v == nil {
+ return
+ }
+ return *v, true
+}
+
+// OldStatus returns the old "status" field's value of the Announcement entity.
+// If the Announcement object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *AnnouncementMutation) OldStatus(ctx context.Context) (v string, err error) {
+ if !m.op.Is(OpUpdateOne) {
+ return v, errors.New("OldStatus is only allowed on UpdateOne operations")
+ }
+ if m.id == nil || m.oldValue == nil {
+ return v, errors.New("OldStatus requires an ID field in the mutation")
+ }
+ oldValue, err := m.oldValue(ctx)
+ if err != nil {
+ return v, fmt.Errorf("querying old value for OldStatus: %w", err)
+ }
+ return oldValue.Status, nil
+}
+
+// ResetStatus resets all changes to the "status" field.
+func (m *AnnouncementMutation) ResetStatus() {
+ m.status = nil
+}
+
+// SetTargeting sets the "targeting" field.
+func (m *AnnouncementMutation) SetTargeting(dt domain.AnnouncementTargeting) {
+ m.targeting = &dt
+}
+
+// Targeting returns the value of the "targeting" field in the mutation.
+func (m *AnnouncementMutation) Targeting() (r domain.AnnouncementTargeting, exists bool) {
+ v := m.targeting
+ if v == nil {
+ return
+ }
+ return *v, true
+}
+
+// OldTargeting returns the old "targeting" field's value of the Announcement entity.
+// If the Announcement object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *AnnouncementMutation) OldTargeting(ctx context.Context) (v domain.AnnouncementTargeting, err error) {
+ if !m.op.Is(OpUpdateOne) {
+ return v, errors.New("OldTargeting is only allowed on UpdateOne operations")
+ }
+ if m.id == nil || m.oldValue == nil {
+ return v, errors.New("OldTargeting requires an ID field in the mutation")
+ }
+ oldValue, err := m.oldValue(ctx)
+ if err != nil {
+ return v, fmt.Errorf("querying old value for OldTargeting: %w", err)
+ }
+ return oldValue.Targeting, nil
+}
+
+// ClearTargeting clears the value of the "targeting" field.
+func (m *AnnouncementMutation) ClearTargeting() {
+ m.targeting = nil
+ m.clearedFields[announcement.FieldTargeting] = struct{}{}
+}
+
+// TargetingCleared returns if the "targeting" field was cleared in this mutation.
+func (m *AnnouncementMutation) TargetingCleared() bool {
+ _, ok := m.clearedFields[announcement.FieldTargeting]
+ return ok
+}
+
+// ResetTargeting resets all changes to the "targeting" field.
+func (m *AnnouncementMutation) ResetTargeting() {
+ m.targeting = nil
+ delete(m.clearedFields, announcement.FieldTargeting)
+}
+
+// SetStartsAt sets the "starts_at" field.
+func (m *AnnouncementMutation) SetStartsAt(t time.Time) {
+ m.starts_at = &t
+}
+
+// StartsAt returns the value of the "starts_at" field in the mutation.
+func (m *AnnouncementMutation) StartsAt() (r time.Time, exists bool) {
+ v := m.starts_at
+ if v == nil {
+ return
+ }
+ return *v, true
+}
+
+// OldStartsAt returns the old "starts_at" field's value of the Announcement entity.
+// If the Announcement object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *AnnouncementMutation) OldStartsAt(ctx context.Context) (v *time.Time, err error) {
+ if !m.op.Is(OpUpdateOne) {
+ return v, errors.New("OldStartsAt is only allowed on UpdateOne operations")
+ }
+ if m.id == nil || m.oldValue == nil {
+ return v, errors.New("OldStartsAt requires an ID field in the mutation")
+ }
+ oldValue, err := m.oldValue(ctx)
+ if err != nil {
+ return v, fmt.Errorf("querying old value for OldStartsAt: %w", err)
+ }
+ return oldValue.StartsAt, nil
+}
+
+// ClearStartsAt clears the value of the "starts_at" field.
+func (m *AnnouncementMutation) ClearStartsAt() {
+ m.starts_at = nil
+ m.clearedFields[announcement.FieldStartsAt] = struct{}{}
+}
+
+// StartsAtCleared returns if the "starts_at" field was cleared in this mutation.
+func (m *AnnouncementMutation) StartsAtCleared() bool {
+ _, ok := m.clearedFields[announcement.FieldStartsAt]
+ return ok
+}
+
+// ResetStartsAt resets all changes to the "starts_at" field.
+func (m *AnnouncementMutation) ResetStartsAt() {
+ m.starts_at = nil
+ delete(m.clearedFields, announcement.FieldStartsAt)
+}
+
+// SetEndsAt sets the "ends_at" field.
+func (m *AnnouncementMutation) SetEndsAt(t time.Time) {
+ m.ends_at = &t
+}
+
+// EndsAt returns the value of the "ends_at" field in the mutation.
+func (m *AnnouncementMutation) EndsAt() (r time.Time, exists bool) {
+ v := m.ends_at
+ if v == nil {
+ return
+ }
+ return *v, true
+}
+
+// OldEndsAt returns the old "ends_at" field's value of the Announcement entity.
+// If the Announcement object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *AnnouncementMutation) OldEndsAt(ctx context.Context) (v *time.Time, err error) {
+ if !m.op.Is(OpUpdateOne) {
+ return v, errors.New("OldEndsAt is only allowed on UpdateOne operations")
+ }
+ if m.id == nil || m.oldValue == nil {
+ return v, errors.New("OldEndsAt requires an ID field in the mutation")
+ }
+ oldValue, err := m.oldValue(ctx)
+ if err != nil {
+ return v, fmt.Errorf("querying old value for OldEndsAt: %w", err)
+ }
+ return oldValue.EndsAt, nil
+}
+
+// ClearEndsAt clears the value of the "ends_at" field.
+func (m *AnnouncementMutation) ClearEndsAt() {
+ m.ends_at = nil
+ m.clearedFields[announcement.FieldEndsAt] = struct{}{}
+}
+
+// EndsAtCleared returns if the "ends_at" field was cleared in this mutation.
+func (m *AnnouncementMutation) EndsAtCleared() bool {
+ _, ok := m.clearedFields[announcement.FieldEndsAt]
+ return ok
+}
+
+// ResetEndsAt resets all changes to the "ends_at" field.
+func (m *AnnouncementMutation) ResetEndsAt() {
+ m.ends_at = nil
+ delete(m.clearedFields, announcement.FieldEndsAt)
+}
+
+// SetCreatedBy sets the "created_by" field.
+func (m *AnnouncementMutation) SetCreatedBy(i int64) {
+ m.created_by = &i
+ m.addcreated_by = nil
+}
+
+// CreatedBy returns the value of the "created_by" field in the mutation.
+func (m *AnnouncementMutation) CreatedBy() (r int64, exists bool) {
+ v := m.created_by
+ if v == nil {
+ return
+ }
+ return *v, true
+}
+
+// OldCreatedBy returns the old "created_by" field's value of the Announcement entity.
+// If the Announcement object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *AnnouncementMutation) OldCreatedBy(ctx context.Context) (v *int64, err error) {
+ if !m.op.Is(OpUpdateOne) {
+ return v, errors.New("OldCreatedBy is only allowed on UpdateOne operations")
+ }
+ if m.id == nil || m.oldValue == nil {
+ return v, errors.New("OldCreatedBy requires an ID field in the mutation")
+ }
+ oldValue, err := m.oldValue(ctx)
+ if err != nil {
+ return v, fmt.Errorf("querying old value for OldCreatedBy: %w", err)
+ }
+ return oldValue.CreatedBy, nil
+}
+
+// AddCreatedBy adds i to the "created_by" field.
+func (m *AnnouncementMutation) AddCreatedBy(i int64) {
+ if m.addcreated_by != nil {
+ *m.addcreated_by += i
+ } else {
+ m.addcreated_by = &i
+ }
+}
+
+// AddedCreatedBy returns the value that was added to the "created_by" field in this mutation.
+func (m *AnnouncementMutation) AddedCreatedBy() (r int64, exists bool) {
+ v := m.addcreated_by
+ if v == nil {
+ return
+ }
+ return *v, true
+}
+
+// ClearCreatedBy clears the value of the "created_by" field.
+func (m *AnnouncementMutation) ClearCreatedBy() {
+ m.created_by = nil
+ m.addcreated_by = nil
+ m.clearedFields[announcement.FieldCreatedBy] = struct{}{}
+}
+
+// CreatedByCleared returns if the "created_by" field was cleared in this mutation.
+func (m *AnnouncementMutation) CreatedByCleared() bool {
+ _, ok := m.clearedFields[announcement.FieldCreatedBy]
+ return ok
+}
+
+// ResetCreatedBy resets all changes to the "created_by" field.
+func (m *AnnouncementMutation) ResetCreatedBy() {
+ m.created_by = nil
+ m.addcreated_by = nil
+ delete(m.clearedFields, announcement.FieldCreatedBy)
+}
+
+// SetUpdatedBy sets the "updated_by" field.
+func (m *AnnouncementMutation) SetUpdatedBy(i int64) {
+ m.updated_by = &i
+ m.addupdated_by = nil
+}
+
+// UpdatedBy returns the value of the "updated_by" field in the mutation.
+func (m *AnnouncementMutation) UpdatedBy() (r int64, exists bool) {
+ v := m.updated_by
+ if v == nil {
+ return
+ }
+ return *v, true
+}
+
+// OldUpdatedBy returns the old "updated_by" field's value of the Announcement entity.
+// If the Announcement object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *AnnouncementMutation) OldUpdatedBy(ctx context.Context) (v *int64, err error) {
+ if !m.op.Is(OpUpdateOne) {
+ return v, errors.New("OldUpdatedBy is only allowed on UpdateOne operations")
+ }
+ if m.id == nil || m.oldValue == nil {
+ return v, errors.New("OldUpdatedBy requires an ID field in the mutation")
+ }
+ oldValue, err := m.oldValue(ctx)
+ if err != nil {
+ return v, fmt.Errorf("querying old value for OldUpdatedBy: %w", err)
+ }
+ return oldValue.UpdatedBy, nil
+}
+
+// AddUpdatedBy adds i to the "updated_by" field.
+func (m *AnnouncementMutation) AddUpdatedBy(i int64) {
+ if m.addupdated_by != nil {
+ *m.addupdated_by += i
+ } else {
+ m.addupdated_by = &i
+ }
+}
+
+// AddedUpdatedBy returns the value that was added to the "updated_by" field in this mutation.
+func (m *AnnouncementMutation) AddedUpdatedBy() (r int64, exists bool) {
+ v := m.addupdated_by
+ if v == nil {
+ return
+ }
+ return *v, true
+}
+
+// ClearUpdatedBy clears the value of the "updated_by" field.
+func (m *AnnouncementMutation) ClearUpdatedBy() {
+ m.updated_by = nil
+ m.addupdated_by = nil
+ m.clearedFields[announcement.FieldUpdatedBy] = struct{}{}
+}
+
+// UpdatedByCleared returns if the "updated_by" field was cleared in this mutation.
+func (m *AnnouncementMutation) UpdatedByCleared() bool {
+ _, ok := m.clearedFields[announcement.FieldUpdatedBy]
+ return ok
+}
+
+// ResetUpdatedBy resets all changes to the "updated_by" field.
+func (m *AnnouncementMutation) ResetUpdatedBy() {
+ m.updated_by = nil
+ m.addupdated_by = nil
+ delete(m.clearedFields, announcement.FieldUpdatedBy)
+}
+
+// SetCreatedAt sets the "created_at" field.
+func (m *AnnouncementMutation) SetCreatedAt(t time.Time) {
+ m.created_at = &t
+}
+
+// CreatedAt returns the value of the "created_at" field in the mutation.
+func (m *AnnouncementMutation) CreatedAt() (r time.Time, exists bool) {
+ v := m.created_at
+ if v == nil {
+ return
+ }
+ return *v, true
+}
+
+// OldCreatedAt returns the old "created_at" field's value of the Announcement entity.
+// If the Announcement object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *AnnouncementMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) {
+ if !m.op.Is(OpUpdateOne) {
+ return v, errors.New("OldCreatedAt is only allowed on UpdateOne operations")
+ }
+ if m.id == nil || m.oldValue == nil {
+ return v, errors.New("OldCreatedAt requires an ID field in the mutation")
+ }
+ oldValue, err := m.oldValue(ctx)
+ if err != nil {
+ return v, fmt.Errorf("querying old value for OldCreatedAt: %w", err)
+ }
+ return oldValue.CreatedAt, nil
+}
+
+// ResetCreatedAt resets all changes to the "created_at" field.
+func (m *AnnouncementMutation) ResetCreatedAt() {
+ m.created_at = nil
+}
+
+// SetUpdatedAt sets the "updated_at" field.
+func (m *AnnouncementMutation) SetUpdatedAt(t time.Time) {
+ m.updated_at = &t
+}
+
+// UpdatedAt returns the value of the "updated_at" field in the mutation.
+func (m *AnnouncementMutation) UpdatedAt() (r time.Time, exists bool) {
+ v := m.updated_at
+ if v == nil {
+ return
+ }
+ return *v, true
+}
+
+// OldUpdatedAt returns the old "updated_at" field's value of the Announcement entity.
+// If the Announcement object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *AnnouncementMutation) OldUpdatedAt(ctx context.Context) (v time.Time, err error) {
+ if !m.op.Is(OpUpdateOne) {
+ return v, errors.New("OldUpdatedAt is only allowed on UpdateOne operations")
+ }
+ if m.id == nil || m.oldValue == nil {
+ return v, errors.New("OldUpdatedAt requires an ID field in the mutation")
+ }
+ oldValue, err := m.oldValue(ctx)
+ if err != nil {
+ return v, fmt.Errorf("querying old value for OldUpdatedAt: %w", err)
+ }
+ return oldValue.UpdatedAt, nil
+}
+
+// ResetUpdatedAt resets all changes to the "updated_at" field.
+func (m *AnnouncementMutation) ResetUpdatedAt() {
+ m.updated_at = nil
+}
+
+// AddReadIDs adds the "reads" edge to the AnnouncementRead entity by ids.
+func (m *AnnouncementMutation) AddReadIDs(ids ...int64) {
+ if m.reads == nil {
+ m.reads = make(map[int64]struct{})
+ }
+ for i := range ids {
+ m.reads[ids[i]] = struct{}{}
+ }
+}
+
+// ClearReads clears the "reads" edge to the AnnouncementRead entity.
+func (m *AnnouncementMutation) ClearReads() {
+ m.clearedreads = true
+}
+
+// ReadsCleared reports if the "reads" edge to the AnnouncementRead entity was cleared.
+func (m *AnnouncementMutation) ReadsCleared() bool {
+ return m.clearedreads
+}
+
+// RemoveReadIDs removes the "reads" edge to the AnnouncementRead entity by IDs.
+func (m *AnnouncementMutation) RemoveReadIDs(ids ...int64) {
+ if m.removedreads == nil {
+ m.removedreads = make(map[int64]struct{})
+ }
+ for i := range ids {
+ delete(m.reads, ids[i])
+ m.removedreads[ids[i]] = struct{}{}
+ }
+}
+
+// RemovedReads returns the removed IDs of the "reads" edge to the AnnouncementRead entity.
+func (m *AnnouncementMutation) RemovedReadsIDs() (ids []int64) {
+ for id := range m.removedreads {
+ ids = append(ids, id)
+ }
+ return
+}
+
+// ReadsIDs returns the "reads" edge IDs in the mutation.
+func (m *AnnouncementMutation) ReadsIDs() (ids []int64) {
+ for id := range m.reads {
+ ids = append(ids, id)
+ }
+ return
+}
+
+// ResetReads resets all changes to the "reads" edge.
+func (m *AnnouncementMutation) ResetReads() {
+ m.reads = nil
+ m.clearedreads = false
+ m.removedreads = nil
+}
+
+// Where appends a list predicates to the AnnouncementMutation builder.
+func (m *AnnouncementMutation) Where(ps ...predicate.Announcement) {
+ m.predicates = append(m.predicates, ps...)
+}
+
+// WhereP appends storage-level predicates to the AnnouncementMutation builder. Using this method,
+// users can use type-assertion to append predicates that do not depend on any generated package.
+func (m *AnnouncementMutation) WhereP(ps ...func(*sql.Selector)) {
+ p := make([]predicate.Announcement, len(ps))
+ for i := range ps {
+ p[i] = ps[i]
+ }
+ m.Where(p...)
+}
+
+// Op returns the operation name.
+func (m *AnnouncementMutation) Op() Op {
+ return m.op
+}
+
+// SetOp allows setting the mutation operation.
+func (m *AnnouncementMutation) SetOp(op Op) {
+ m.op = op
+}
+
+// Type returns the node type of this mutation (Announcement).
+func (m *AnnouncementMutation) Type() string {
+ return m.typ
+}
+
+// Fields returns all fields that were changed during this mutation. Note that in
+// order to get all numeric fields that were incremented/decremented, call
+// AddedFields().
+func (m *AnnouncementMutation) Fields() []string {
+ fields := make([]string, 0, 10)
+ if m.title != nil {
+ fields = append(fields, announcement.FieldTitle)
+ }
+ if m.content != nil {
+ fields = append(fields, announcement.FieldContent)
+ }
+ if m.status != nil {
+ fields = append(fields, announcement.FieldStatus)
+ }
+ if m.targeting != nil {
+ fields = append(fields, announcement.FieldTargeting)
+ }
+ if m.starts_at != nil {
+ fields = append(fields, announcement.FieldStartsAt)
+ }
+ if m.ends_at != nil {
+ fields = append(fields, announcement.FieldEndsAt)
+ }
+ if m.created_by != nil {
+ fields = append(fields, announcement.FieldCreatedBy)
+ }
+ if m.updated_by != nil {
+ fields = append(fields, announcement.FieldUpdatedBy)
+ }
+ if m.created_at != nil {
+ fields = append(fields, announcement.FieldCreatedAt)
+ }
+ if m.updated_at != nil {
+ fields = append(fields, announcement.FieldUpdatedAt)
+ }
+ return fields
+}
+
+// Field returns the value of a field with the given name. The second boolean
+// return value indicates that this field was not set, or was not defined in the
+// schema.
+func (m *AnnouncementMutation) Field(name string) (ent.Value, bool) {
+ switch name {
+ case announcement.FieldTitle:
+ return m.Title()
+ case announcement.FieldContent:
+ return m.Content()
+ case announcement.FieldStatus:
+ return m.Status()
+ case announcement.FieldTargeting:
+ return m.Targeting()
+ case announcement.FieldStartsAt:
+ return m.StartsAt()
+ case announcement.FieldEndsAt:
+ return m.EndsAt()
+ case announcement.FieldCreatedBy:
+ return m.CreatedBy()
+ case announcement.FieldUpdatedBy:
+ return m.UpdatedBy()
+ case announcement.FieldCreatedAt:
+ return m.CreatedAt()
+ case announcement.FieldUpdatedAt:
+ return m.UpdatedAt()
+ }
+ return nil, false
+}
+
+// OldField returns the old value of the field from the database. An error is
+// returned if the mutation operation is not UpdateOne, or the query to the
+// database failed.
+func (m *AnnouncementMutation) OldField(ctx context.Context, name string) (ent.Value, error) {
+ switch name {
+ case announcement.FieldTitle:
+ return m.OldTitle(ctx)
+ case announcement.FieldContent:
+ return m.OldContent(ctx)
+ case announcement.FieldStatus:
+ return m.OldStatus(ctx)
+ case announcement.FieldTargeting:
+ return m.OldTargeting(ctx)
+ case announcement.FieldStartsAt:
+ return m.OldStartsAt(ctx)
+ case announcement.FieldEndsAt:
+ return m.OldEndsAt(ctx)
+ case announcement.FieldCreatedBy:
+ return m.OldCreatedBy(ctx)
+ case announcement.FieldUpdatedBy:
+ return m.OldUpdatedBy(ctx)
+ case announcement.FieldCreatedAt:
+ return m.OldCreatedAt(ctx)
+ case announcement.FieldUpdatedAt:
+ return m.OldUpdatedAt(ctx)
+ }
+ return nil, fmt.Errorf("unknown Announcement field %s", name)
+}
+
+// SetField sets the value of a field with the given name. It returns an error if
+// the field is not defined in the schema, or if the type mismatched the field
+// type.
+func (m *AnnouncementMutation) SetField(name string, value ent.Value) error {
+ switch name {
+ case announcement.FieldTitle:
+ v, ok := value.(string)
+ if !ok {
+ return fmt.Errorf("unexpected type %T for field %s", value, name)
+ }
+ m.SetTitle(v)
+ return nil
+ case announcement.FieldContent:
+ v, ok := value.(string)
+ if !ok {
+ return fmt.Errorf("unexpected type %T for field %s", value, name)
+ }
+ m.SetContent(v)
+ return nil
+ case announcement.FieldStatus:
+ v, ok := value.(string)
+ if !ok {
+ return fmt.Errorf("unexpected type %T for field %s", value, name)
+ }
+ m.SetStatus(v)
+ return nil
+ case announcement.FieldTargeting:
+ v, ok := value.(domain.AnnouncementTargeting)
+ if !ok {
+ return fmt.Errorf("unexpected type %T for field %s", value, name)
+ }
+ m.SetTargeting(v)
+ return nil
+ case announcement.FieldStartsAt:
+ v, ok := value.(time.Time)
+ if !ok {
+ return fmt.Errorf("unexpected type %T for field %s", value, name)
+ }
+ m.SetStartsAt(v)
+ return nil
+ case announcement.FieldEndsAt:
+ v, ok := value.(time.Time)
+ if !ok {
+ return fmt.Errorf("unexpected type %T for field %s", value, name)
+ }
+ m.SetEndsAt(v)
+ return nil
+ case announcement.FieldCreatedBy:
+ v, ok := value.(int64)
+ if !ok {
+ return fmt.Errorf("unexpected type %T for field %s", value, name)
+ }
+ m.SetCreatedBy(v)
+ return nil
+ case announcement.FieldUpdatedBy:
+ v, ok := value.(int64)
+ if !ok {
+ return fmt.Errorf("unexpected type %T for field %s", value, name)
+ }
+ m.SetUpdatedBy(v)
+ return nil
+ case announcement.FieldCreatedAt:
+ v, ok := value.(time.Time)
+ if !ok {
+ return fmt.Errorf("unexpected type %T for field %s", value, name)
+ }
+ m.SetCreatedAt(v)
+ return nil
+ case announcement.FieldUpdatedAt:
+ v, ok := value.(time.Time)
+ if !ok {
+ return fmt.Errorf("unexpected type %T for field %s", value, name)
+ }
+ m.SetUpdatedAt(v)
+ return nil
+ }
+ return fmt.Errorf("unknown Announcement field %s", name)
+}
+
+// AddedFields returns all numeric fields that were incremented/decremented during
+// this mutation.
+func (m *AnnouncementMutation) AddedFields() []string {
+ var fields []string
+ if m.addcreated_by != nil {
+ fields = append(fields, announcement.FieldCreatedBy)
+ }
+ if m.addupdated_by != nil {
+ fields = append(fields, announcement.FieldUpdatedBy)
+ }
+ return fields
+}
+
+// AddedField returns the numeric value that was incremented/decremented on a field
+// with the given name. The second boolean return value indicates that this field
+// was not set, or was not defined in the schema.
+func (m *AnnouncementMutation) AddedField(name string) (ent.Value, bool) {
+ switch name {
+ case announcement.FieldCreatedBy:
+ return m.AddedCreatedBy()
+ case announcement.FieldUpdatedBy:
+ return m.AddedUpdatedBy()
+ }
+ return nil, false
+}
+
+// AddField adds the value to the field with the given name. It returns an error if
+// the field is not defined in the schema, or if the type mismatched the field
+// type.
+func (m *AnnouncementMutation) AddField(name string, value ent.Value) error {
+ switch name {
+ case announcement.FieldCreatedBy:
+ v, ok := value.(int64)
+ if !ok {
+ return fmt.Errorf("unexpected type %T for field %s", value, name)
+ }
+ m.AddCreatedBy(v)
+ return nil
+ case announcement.FieldUpdatedBy:
+ v, ok := value.(int64)
+ if !ok {
+ return fmt.Errorf("unexpected type %T for field %s", value, name)
+ }
+ m.AddUpdatedBy(v)
+ return nil
+ }
+ return fmt.Errorf("unknown Announcement numeric field %s", name)
+}
+
+// ClearedFields returns all nullable fields that were cleared during this
+// mutation.
+func (m *AnnouncementMutation) ClearedFields() []string {
+ var fields []string
+ if m.FieldCleared(announcement.FieldTargeting) {
+ fields = append(fields, announcement.FieldTargeting)
+ }
+ if m.FieldCleared(announcement.FieldStartsAt) {
+ fields = append(fields, announcement.FieldStartsAt)
+ }
+ if m.FieldCleared(announcement.FieldEndsAt) {
+ fields = append(fields, announcement.FieldEndsAt)
+ }
+ if m.FieldCleared(announcement.FieldCreatedBy) {
+ fields = append(fields, announcement.FieldCreatedBy)
+ }
+ if m.FieldCleared(announcement.FieldUpdatedBy) {
+ fields = append(fields, announcement.FieldUpdatedBy)
+ }
+ return fields
+}
+
+// FieldCleared returns a boolean indicating if a field with the given name was
+// cleared in this mutation.
+func (m *AnnouncementMutation) FieldCleared(name string) bool {
+ _, ok := m.clearedFields[name]
+ return ok
+}
+
+// ClearField clears the value of the field with the given name. It returns an
+// error if the field is not defined in the schema.
+func (m *AnnouncementMutation) ClearField(name string) error {
+ switch name {
+ case announcement.FieldTargeting:
+ m.ClearTargeting()
+ return nil
+ case announcement.FieldStartsAt:
+ m.ClearStartsAt()
+ return nil
+ case announcement.FieldEndsAt:
+ m.ClearEndsAt()
+ return nil
+ case announcement.FieldCreatedBy:
+ m.ClearCreatedBy()
+ return nil
+ case announcement.FieldUpdatedBy:
+ m.ClearUpdatedBy()
+ return nil
+ }
+ return fmt.Errorf("unknown Announcement nullable field %s", name)
+}
+
+// ResetField resets all changes in the mutation for the field with the given name.
+// It returns an error if the field is not defined in the schema.
+func (m *AnnouncementMutation) ResetField(name string) error {
+ switch name {
+ case announcement.FieldTitle:
+ m.ResetTitle()
+ return nil
+ case announcement.FieldContent:
+ m.ResetContent()
+ return nil
+ case announcement.FieldStatus:
+ m.ResetStatus()
+ return nil
+ case announcement.FieldTargeting:
+ m.ResetTargeting()
+ return nil
+ case announcement.FieldStartsAt:
+ m.ResetStartsAt()
+ return nil
+ case announcement.FieldEndsAt:
+ m.ResetEndsAt()
+ return nil
+ case announcement.FieldCreatedBy:
+ m.ResetCreatedBy()
+ return nil
+ case announcement.FieldUpdatedBy:
+ m.ResetUpdatedBy()
+ return nil
+ case announcement.FieldCreatedAt:
+ m.ResetCreatedAt()
+ return nil
+ case announcement.FieldUpdatedAt:
+ m.ResetUpdatedAt()
+ return nil
+ }
+ return fmt.Errorf("unknown Announcement field %s", name)
+}
+
+// AddedEdges returns all edge names that were set/added in this mutation.
+func (m *AnnouncementMutation) AddedEdges() []string {
+ edges := make([]string, 0, 1)
+ if m.reads != nil {
+ edges = append(edges, announcement.EdgeReads)
+ }
+ return edges
+}
+
+// AddedIDs returns all IDs (to other nodes) that were added for the given edge
+// name in this mutation.
+func (m *AnnouncementMutation) AddedIDs(name string) []ent.Value {
+ switch name {
+ case announcement.EdgeReads:
+ ids := make([]ent.Value, 0, len(m.reads))
+ for id := range m.reads {
+ ids = append(ids, id)
+ }
+ return ids
+ }
+ return nil
+}
+
+// RemovedEdges returns all edge names that were removed in this mutation.
+func (m *AnnouncementMutation) RemovedEdges() []string {
+ edges := make([]string, 0, 1)
+ if m.removedreads != nil {
+ edges = append(edges, announcement.EdgeReads)
+ }
+ return edges
+}
+
+// RemovedIDs returns all IDs (to other nodes) that were removed for the edge with
+// the given name in this mutation.
+func (m *AnnouncementMutation) RemovedIDs(name string) []ent.Value {
+ switch name {
+ case announcement.EdgeReads:
+ ids := make([]ent.Value, 0, len(m.removedreads))
+ for id := range m.removedreads {
+ ids = append(ids, id)
+ }
+ return ids
+ }
+ return nil
+}
+
+// ClearedEdges returns all edge names that were cleared in this mutation.
+func (m *AnnouncementMutation) ClearedEdges() []string {
+ edges := make([]string, 0, 1)
+ if m.clearedreads {
+ edges = append(edges, announcement.EdgeReads)
+ }
+ return edges
+}
+
+// EdgeCleared returns a boolean which indicates if the edge with the given name
+// was cleared in this mutation.
+func (m *AnnouncementMutation) EdgeCleared(name string) bool {
+ switch name {
+ case announcement.EdgeReads:
+ return m.clearedreads
+ }
+ return false
+}
+
+// ClearEdge clears the value of the edge with the given name. It returns an error
+// if that edge is not defined in the schema.
+func (m *AnnouncementMutation) ClearEdge(name string) error {
+ switch name {
+ }
+ return fmt.Errorf("unknown Announcement unique edge %s", name)
+}
+
+// ResetEdge resets all changes to the edge with the given name in this mutation.
+// It returns an error if the edge is not defined in the schema.
+func (m *AnnouncementMutation) ResetEdge(name string) error {
+ switch name {
+ case announcement.EdgeReads:
+ m.ResetReads()
+ return nil
+ }
+ return fmt.Errorf("unknown Announcement edge %s", name)
+}
+
+// AnnouncementReadMutation represents an operation that mutates the AnnouncementRead nodes in the graph.
+type AnnouncementReadMutation struct {
+ config
+ op Op
+ typ string
+ id *int64
+ read_at *time.Time
+ created_at *time.Time
+ clearedFields map[string]struct{}
+ announcement *int64
+ clearedannouncement bool
+ user *int64
+ cleareduser bool
+ done bool
+ oldValue func(context.Context) (*AnnouncementRead, error)
+ predicates []predicate.AnnouncementRead
+}
+
+var _ ent.Mutation = (*AnnouncementReadMutation)(nil)
+
+// announcementreadOption allows management of the mutation configuration using functional options.
+type announcementreadOption func(*AnnouncementReadMutation)
+
+// newAnnouncementReadMutation creates new mutation for the AnnouncementRead entity.
+func newAnnouncementReadMutation(c config, op Op, opts ...announcementreadOption) *AnnouncementReadMutation {
+ m := &AnnouncementReadMutation{
+ config: c,
+ op: op,
+ typ: TypeAnnouncementRead,
+ clearedFields: make(map[string]struct{}),
+ }
+ for _, opt := range opts {
+ opt(m)
+ }
+ return m
+}
+
+// withAnnouncementReadID sets the ID field of the mutation.
+func withAnnouncementReadID(id int64) announcementreadOption {
+ return func(m *AnnouncementReadMutation) {
+ var (
+ err error
+ once sync.Once
+ value *AnnouncementRead
+ )
+ m.oldValue = func(ctx context.Context) (*AnnouncementRead, error) {
+ once.Do(func() {
+ if m.done {
+ err = errors.New("querying old values post mutation is not allowed")
+ } else {
+ value, err = m.Client().AnnouncementRead.Get(ctx, id)
+ }
+ })
+ return value, err
+ }
+ m.id = &id
+ }
+}
+
+// withAnnouncementRead sets the old AnnouncementRead of the mutation.
+func withAnnouncementRead(node *AnnouncementRead) announcementreadOption {
+ return func(m *AnnouncementReadMutation) {
+ m.oldValue = func(context.Context) (*AnnouncementRead, error) {
+ return node, nil
+ }
+ m.id = &node.ID
+ }
+}
+
+// Client returns a new `ent.Client` from the mutation. If the mutation was
+// executed in a transaction (ent.Tx), a transactional client is returned.
+func (m AnnouncementReadMutation) Client() *Client {
+ client := &Client{config: m.config}
+ client.init()
+ return client
+}
+
+// Tx returns an `ent.Tx` for mutations that were executed in transactions;
+// it returns an error otherwise.
+func (m AnnouncementReadMutation) Tx() (*Tx, error) {
+ if _, ok := m.driver.(*txDriver); !ok {
+ return nil, errors.New("ent: mutation is not running in a transaction")
+ }
+ tx := &Tx{config: m.config}
+ tx.init()
+ return tx, nil
+}
+
+// ID returns the ID value in the mutation. Note that the ID is only available
+// if it was provided to the builder or after it was returned from the database.
+func (m *AnnouncementReadMutation) ID() (id int64, exists bool) {
+ if m.id == nil {
+ return
+ }
+ return *m.id, true
+}
+
+// IDs queries the database and returns the entity ids that match the mutation's predicate.
+// That means, if the mutation is applied within a transaction with an isolation level such
+// as sql.LevelSerializable, the returned ids match the ids of the rows that will be updated
+// or updated by the mutation.
+func (m *AnnouncementReadMutation) IDs(ctx context.Context) ([]int64, error) {
+ switch {
+ case m.op.Is(OpUpdateOne | OpDeleteOne):
+ id, exists := m.ID()
+ if exists {
+ return []int64{id}, nil
+ }
+ fallthrough
+ case m.op.Is(OpUpdate | OpDelete):
+ return m.Client().AnnouncementRead.Query().Where(m.predicates...).IDs(ctx)
+ default:
+ return nil, fmt.Errorf("IDs is not allowed on %s operations", m.op)
+ }
+}
+
+// SetAnnouncementID sets the "announcement_id" field.
+func (m *AnnouncementReadMutation) SetAnnouncementID(i int64) {
+ m.announcement = &i
+}
+
+// AnnouncementID returns the value of the "announcement_id" field in the mutation.
+func (m *AnnouncementReadMutation) AnnouncementID() (r int64, exists bool) {
+ v := m.announcement
+ if v == nil {
+ return
+ }
+ return *v, true
+}
+
+// OldAnnouncementID returns the old "announcement_id" field's value of the AnnouncementRead entity.
+// If the AnnouncementRead object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *AnnouncementReadMutation) OldAnnouncementID(ctx context.Context) (v int64, err error) {
+ if !m.op.Is(OpUpdateOne) {
+ return v, errors.New("OldAnnouncementID is only allowed on UpdateOne operations")
+ }
+ if m.id == nil || m.oldValue == nil {
+ return v, errors.New("OldAnnouncementID requires an ID field in the mutation")
+ }
+ oldValue, err := m.oldValue(ctx)
+ if err != nil {
+ return v, fmt.Errorf("querying old value for OldAnnouncementID: %w", err)
+ }
+ return oldValue.AnnouncementID, nil
+}
+
+// ResetAnnouncementID resets all changes to the "announcement_id" field.
+func (m *AnnouncementReadMutation) ResetAnnouncementID() {
+ m.announcement = nil
+}
+
+// SetUserID sets the "user_id" field.
+func (m *AnnouncementReadMutation) SetUserID(i int64) {
+ m.user = &i
+}
+
+// UserID returns the value of the "user_id" field in the mutation.
+func (m *AnnouncementReadMutation) UserID() (r int64, exists bool) {
+ v := m.user
+ if v == nil {
+ return
+ }
+ return *v, true
+}
+
+// OldUserID returns the old "user_id" field's value of the AnnouncementRead entity.
+// If the AnnouncementRead object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *AnnouncementReadMutation) OldUserID(ctx context.Context) (v int64, err error) {
+ if !m.op.Is(OpUpdateOne) {
+ return v, errors.New("OldUserID is only allowed on UpdateOne operations")
+ }
+ if m.id == nil || m.oldValue == nil {
+ return v, errors.New("OldUserID requires an ID field in the mutation")
+ }
+ oldValue, err := m.oldValue(ctx)
+ if err != nil {
+ return v, fmt.Errorf("querying old value for OldUserID: %w", err)
+ }
+ return oldValue.UserID, nil
+}
+
+// ResetUserID resets all changes to the "user_id" field.
+func (m *AnnouncementReadMutation) ResetUserID() {
+ m.user = nil
+}
+
+// SetReadAt sets the "read_at" field.
+func (m *AnnouncementReadMutation) SetReadAt(t time.Time) {
+ m.read_at = &t
+}
+
+// ReadAt returns the value of the "read_at" field in the mutation.
+func (m *AnnouncementReadMutation) ReadAt() (r time.Time, exists bool) {
+ v := m.read_at
+ if v == nil {
+ return
+ }
+ return *v, true
+}
+
+// OldReadAt returns the old "read_at" field's value of the AnnouncementRead entity.
+// If the AnnouncementRead object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *AnnouncementReadMutation) OldReadAt(ctx context.Context) (v time.Time, err error) {
+ if !m.op.Is(OpUpdateOne) {
+ return v, errors.New("OldReadAt is only allowed on UpdateOne operations")
+ }
+ if m.id == nil || m.oldValue == nil {
+ return v, errors.New("OldReadAt requires an ID field in the mutation")
+ }
+ oldValue, err := m.oldValue(ctx)
+ if err != nil {
+ return v, fmt.Errorf("querying old value for OldReadAt: %w", err)
+ }
+ return oldValue.ReadAt, nil
+}
+
+// ResetReadAt resets all changes to the "read_at" field.
+func (m *AnnouncementReadMutation) ResetReadAt() {
+ m.read_at = nil
+}
+
+// SetCreatedAt sets the "created_at" field.
+func (m *AnnouncementReadMutation) SetCreatedAt(t time.Time) {
+ m.created_at = &t
+}
+
+// CreatedAt returns the value of the "created_at" field in the mutation.
+func (m *AnnouncementReadMutation) CreatedAt() (r time.Time, exists bool) {
+ v := m.created_at
+ if v == nil {
+ return
+ }
+ return *v, true
+}
+
+// OldCreatedAt returns the old "created_at" field's value of the AnnouncementRead entity.
+// If the AnnouncementRead object wasn't provided to the builder, the object is fetched from the database.
+// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
+func (m *AnnouncementReadMutation) OldCreatedAt(ctx context.Context) (v time.Time, err error) {
+ if !m.op.Is(OpUpdateOne) {
+ return v, errors.New("OldCreatedAt is only allowed on UpdateOne operations")
+ }
+ if m.id == nil || m.oldValue == nil {
+ return v, errors.New("OldCreatedAt requires an ID field in the mutation")
+ }
+ oldValue, err := m.oldValue(ctx)
+ if err != nil {
+ return v, fmt.Errorf("querying old value for OldCreatedAt: %w", err)
+ }
+ return oldValue.CreatedAt, nil
+}
+
+// ResetCreatedAt resets all changes to the "created_at" field.
+func (m *AnnouncementReadMutation) ResetCreatedAt() {
+ m.created_at = nil
+}
+
+// ClearAnnouncement clears the "announcement" edge to the Announcement entity.
+func (m *AnnouncementReadMutation) ClearAnnouncement() {
+ m.clearedannouncement = true
+ m.clearedFields[announcementread.FieldAnnouncementID] = struct{}{}
+}
+
+// AnnouncementCleared reports if the "announcement" edge to the Announcement entity was cleared.
+func (m *AnnouncementReadMutation) AnnouncementCleared() bool {
+ return m.clearedannouncement
+}
+
+// AnnouncementIDs returns the "announcement" edge IDs in the mutation.
+// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use
+// AnnouncementID instead. It exists only for internal usage by the builders.
+func (m *AnnouncementReadMutation) AnnouncementIDs() (ids []int64) {
+ if id := m.announcement; id != nil {
+ ids = append(ids, *id)
+ }
+ return
+}
+
+// ResetAnnouncement resets all changes to the "announcement" edge.
+func (m *AnnouncementReadMutation) ResetAnnouncement() {
+ m.announcement = nil
+ m.clearedannouncement = false
+}
+
+// ClearUser clears the "user" edge to the User entity.
+func (m *AnnouncementReadMutation) ClearUser() {
+ m.cleareduser = true
+ m.clearedFields[announcementread.FieldUserID] = struct{}{}
+}
+
+// UserCleared reports if the "user" edge to the User entity was cleared.
+func (m *AnnouncementReadMutation) UserCleared() bool {
+ return m.cleareduser
+}
+
+// UserIDs returns the "user" edge IDs in the mutation.
+// Note that IDs always returns len(IDs) <= 1 for unique edges, and you should use
+// UserID instead. It exists only for internal usage by the builders.
+func (m *AnnouncementReadMutation) UserIDs() (ids []int64) {
+ if id := m.user; id != nil {
+ ids = append(ids, *id)
+ }
+ return
+}
+
+// ResetUser resets all changes to the "user" edge.
+func (m *AnnouncementReadMutation) ResetUser() {
+ m.user = nil
+ m.cleareduser = false
+}
+
+// Where appends a list predicates to the AnnouncementReadMutation builder.
+func (m *AnnouncementReadMutation) Where(ps ...predicate.AnnouncementRead) {
+ m.predicates = append(m.predicates, ps...)
+}
+
+// WhereP appends storage-level predicates to the AnnouncementReadMutation builder. Using this method,
+// users can use type-assertion to append predicates that do not depend on any generated package.
+func (m *AnnouncementReadMutation) WhereP(ps ...func(*sql.Selector)) {
+ p := make([]predicate.AnnouncementRead, len(ps))
+ for i := range ps {
+ p[i] = ps[i]
+ }
+ m.Where(p...)
+}
+
+// Op returns the operation name.
+func (m *AnnouncementReadMutation) Op() Op {
+ return m.op
+}
+
+// SetOp allows setting the mutation operation.
+func (m *AnnouncementReadMutation) SetOp(op Op) {
+ m.op = op
+}
+
+// Type returns the node type of this mutation (AnnouncementRead).
+func (m *AnnouncementReadMutation) Type() string {
+ return m.typ
+}
+
+// Fields returns all fields that were changed during this mutation. Note that in
+// order to get all numeric fields that were incremented/decremented, call
+// AddedFields().
+func (m *AnnouncementReadMutation) Fields() []string {
+ fields := make([]string, 0, 4)
+ if m.announcement != nil {
+ fields = append(fields, announcementread.FieldAnnouncementID)
+ }
+ if m.user != nil {
+ fields = append(fields, announcementread.FieldUserID)
+ }
+ if m.read_at != nil {
+ fields = append(fields, announcementread.FieldReadAt)
+ }
+ if m.created_at != nil {
+ fields = append(fields, announcementread.FieldCreatedAt)
+ }
+ return fields
+}
+
+// Field returns the value of a field with the given name. The second boolean
+// return value indicates that this field was not set, or was not defined in the
+// schema.
+func (m *AnnouncementReadMutation) Field(name string) (ent.Value, bool) {
+ switch name {
+ case announcementread.FieldAnnouncementID:
+ return m.AnnouncementID()
+ case announcementread.FieldUserID:
+ return m.UserID()
+ case announcementread.FieldReadAt:
+ return m.ReadAt()
+ case announcementread.FieldCreatedAt:
+ return m.CreatedAt()
+ }
+ return nil, false
+}
+
+// OldField returns the old value of the field from the database. An error is
+// returned if the mutation operation is not UpdateOne, or the query to the
+// database failed.
+func (m *AnnouncementReadMutation) OldField(ctx context.Context, name string) (ent.Value, error) {
+ switch name {
+ case announcementread.FieldAnnouncementID:
+ return m.OldAnnouncementID(ctx)
+ case announcementread.FieldUserID:
+ return m.OldUserID(ctx)
+ case announcementread.FieldReadAt:
+ return m.OldReadAt(ctx)
+ case announcementread.FieldCreatedAt:
+ return m.OldCreatedAt(ctx)
+ }
+ return nil, fmt.Errorf("unknown AnnouncementRead field %s", name)
+}
+
+// SetField sets the value of a field with the given name. It returns an error if
+// the field is not defined in the schema, or if the type mismatched the field
+// type.
+func (m *AnnouncementReadMutation) SetField(name string, value ent.Value) error {
+ switch name {
+ case announcementread.FieldAnnouncementID:
+ v, ok := value.(int64)
+ if !ok {
+ return fmt.Errorf("unexpected type %T for field %s", value, name)
+ }
+ m.SetAnnouncementID(v)
+ return nil
+ case announcementread.FieldUserID:
+ v, ok := value.(int64)
+ if !ok {
+ return fmt.Errorf("unexpected type %T for field %s", value, name)
+ }
+ m.SetUserID(v)
+ return nil
+ case announcementread.FieldReadAt:
+ v, ok := value.(time.Time)
+ if !ok {
+ return fmt.Errorf("unexpected type %T for field %s", value, name)
+ }
+ m.SetReadAt(v)
+ return nil
+ case announcementread.FieldCreatedAt:
+ v, ok := value.(time.Time)
+ if !ok {
+ return fmt.Errorf("unexpected type %T for field %s", value, name)
+ }
+ m.SetCreatedAt(v)
+ return nil
+ }
+ return fmt.Errorf("unknown AnnouncementRead field %s", name)
+}
+
+// AddedFields returns all numeric fields that were incremented/decremented during
+// this mutation.
+func (m *AnnouncementReadMutation) AddedFields() []string {
+ var fields []string
+ return fields
+}
+
+// AddedField returns the numeric value that was incremented/decremented on a field
+// with the given name. The second boolean return value indicates that this field
+// was not set, or was not defined in the schema.
+func (m *AnnouncementReadMutation) AddedField(name string) (ent.Value, bool) {
+ switch name {
+ }
+ return nil, false
+}
+
+// AddField adds the value to the field with the given name. It returns an error if
+// the field is not defined in the schema, or if the type mismatched the field
+// type.
+func (m *AnnouncementReadMutation) AddField(name string, value ent.Value) error {
+ switch name {
+ }
+ return fmt.Errorf("unknown AnnouncementRead numeric field %s", name)
+}
+
+// ClearedFields returns all nullable fields that were cleared during this
+// mutation.
+func (m *AnnouncementReadMutation) ClearedFields() []string {
+ return nil
+}
+
+// FieldCleared returns a boolean indicating if a field with the given name was
+// cleared in this mutation.
+func (m *AnnouncementReadMutation) FieldCleared(name string) bool {
+ _, ok := m.clearedFields[name]
+ return ok
+}
+
+// ClearField clears the value of the field with the given name. It returns an
+// error if the field is not defined in the schema.
+func (m *AnnouncementReadMutation) ClearField(name string) error {
+ return fmt.Errorf("unknown AnnouncementRead nullable field %s", name)
+}
+
+// ResetField resets all changes in the mutation for the field with the given name.
+// It returns an error if the field is not defined in the schema.
+func (m *AnnouncementReadMutation) ResetField(name string) error {
+ switch name {
+ case announcementread.FieldAnnouncementID:
+ m.ResetAnnouncementID()
+ return nil
+ case announcementread.FieldUserID:
+ m.ResetUserID()
+ return nil
+ case announcementread.FieldReadAt:
+ m.ResetReadAt()
+ return nil
+ case announcementread.FieldCreatedAt:
+ m.ResetCreatedAt()
+ return nil
+ }
+ return fmt.Errorf("unknown AnnouncementRead field %s", name)
+}
+
+// AddedEdges returns all edge names that were set/added in this mutation.
+func (m *AnnouncementReadMutation) AddedEdges() []string {
+ edges := make([]string, 0, 2)
+ if m.announcement != nil {
+ edges = append(edges, announcementread.EdgeAnnouncement)
+ }
+ if m.user != nil {
+ edges = append(edges, announcementread.EdgeUser)
+ }
+ return edges
+}
+
+// AddedIDs returns all IDs (to other nodes) that were added for the given edge
+// name in this mutation.
+func (m *AnnouncementReadMutation) AddedIDs(name string) []ent.Value {
+ switch name {
+ case announcementread.EdgeAnnouncement:
+ if id := m.announcement; id != nil {
+ return []ent.Value{*id}
+ }
+ case announcementread.EdgeUser:
+ if id := m.user; id != nil {
+ return []ent.Value{*id}
+ }
+ }
+ return nil
+}
+
+// RemovedEdges returns all edge names that were removed in this mutation.
+func (m *AnnouncementReadMutation) RemovedEdges() []string {
+ edges := make([]string, 0, 2)
+ return edges
+}
+
+// RemovedIDs returns all IDs (to other nodes) that were removed for the edge with
+// the given name in this mutation.
+func (m *AnnouncementReadMutation) RemovedIDs(name string) []ent.Value {
+ return nil
+}
+
+// ClearedEdges returns all edge names that were cleared in this mutation.
+func (m *AnnouncementReadMutation) ClearedEdges() []string {
+ edges := make([]string, 0, 2)
+ if m.clearedannouncement {
+ edges = append(edges, announcementread.EdgeAnnouncement)
+ }
+ if m.cleareduser {
+ edges = append(edges, announcementread.EdgeUser)
+ }
+ return edges
+}
+
+// EdgeCleared returns a boolean which indicates if the edge with the given name
+// was cleared in this mutation.
+func (m *AnnouncementReadMutation) EdgeCleared(name string) bool {
+ switch name {
+ case announcementread.EdgeAnnouncement:
+ return m.clearedannouncement
+ case announcementread.EdgeUser:
+ return m.cleareduser
+ }
+ return false
+}
+
+// ClearEdge clears the value of the edge with the given name. It returns an error
+// if that edge is not defined in the schema.
+func (m *AnnouncementReadMutation) ClearEdge(name string) error {
+ switch name {
+ case announcementread.EdgeAnnouncement:
+ m.ClearAnnouncement()
+ return nil
+ case announcementread.EdgeUser:
+ m.ClearUser()
+ return nil
+ }
+ return fmt.Errorf("unknown AnnouncementRead unique edge %s", name)
+}
+
+// ResetEdge resets all changes to the edge with the given name in this mutation.
+// It returns an error if the edge is not defined in the schema.
+func (m *AnnouncementReadMutation) ResetEdge(name string) error {
+ switch name {
+ case announcementread.EdgeAnnouncement:
+ m.ResetAnnouncement()
+ return nil
+ case announcementread.EdgeUser:
+ m.ResetUser()
+ return nil
+ }
+ return fmt.Errorf("unknown AnnouncementRead edge %s", name)
+}
+
// GroupMutation represents an operation that mutates the Group nodes in the graph.
type GroupMutation struct {
config
@@ -14376,6 +16046,9 @@ type UserMutation struct {
assigned_subscriptions map[int64]struct{}
removedassigned_subscriptions map[int64]struct{}
clearedassigned_subscriptions bool
+ announcement_reads map[int64]struct{}
+ removedannouncement_reads map[int64]struct{}
+ clearedannouncement_reads bool
allowed_groups map[int64]struct{}
removedallowed_groups map[int64]struct{}
clearedallowed_groups bool
@@ -15290,6 +16963,60 @@ func (m *UserMutation) ResetAssignedSubscriptions() {
m.removedassigned_subscriptions = nil
}
+// AddAnnouncementReadIDs adds the "announcement_reads" edge to the AnnouncementRead entity by ids.
+func (m *UserMutation) AddAnnouncementReadIDs(ids ...int64) {
+ if m.announcement_reads == nil {
+ m.announcement_reads = make(map[int64]struct{})
+ }
+ for i := range ids {
+ m.announcement_reads[ids[i]] = struct{}{}
+ }
+}
+
+// ClearAnnouncementReads clears the "announcement_reads" edge to the AnnouncementRead entity.
+func (m *UserMutation) ClearAnnouncementReads() {
+ m.clearedannouncement_reads = true
+}
+
+// AnnouncementReadsCleared reports if the "announcement_reads" edge to the AnnouncementRead entity was cleared.
+func (m *UserMutation) AnnouncementReadsCleared() bool {
+ return m.clearedannouncement_reads
+}
+
+// RemoveAnnouncementReadIDs removes the "announcement_reads" edge to the AnnouncementRead entity by IDs.
+func (m *UserMutation) RemoveAnnouncementReadIDs(ids ...int64) {
+ if m.removedannouncement_reads == nil {
+ m.removedannouncement_reads = make(map[int64]struct{})
+ }
+ for i := range ids {
+ delete(m.announcement_reads, ids[i])
+ m.removedannouncement_reads[ids[i]] = struct{}{}
+ }
+}
+
+// RemovedAnnouncementReads returns the removed IDs of the "announcement_reads" edge to the AnnouncementRead entity.
+func (m *UserMutation) RemovedAnnouncementReadsIDs() (ids []int64) {
+ for id := range m.removedannouncement_reads {
+ ids = append(ids, id)
+ }
+ return
+}
+
+// AnnouncementReadsIDs returns the "announcement_reads" edge IDs in the mutation.
+func (m *UserMutation) AnnouncementReadsIDs() (ids []int64) {
+ for id := range m.announcement_reads {
+ ids = append(ids, id)
+ }
+ return
+}
+
+// ResetAnnouncementReads resets all changes to the "announcement_reads" edge.
+func (m *UserMutation) ResetAnnouncementReads() {
+ m.announcement_reads = nil
+ m.clearedannouncement_reads = false
+ m.removedannouncement_reads = nil
+}
+
// AddAllowedGroupIDs adds the "allowed_groups" edge to the Group entity by ids.
func (m *UserMutation) AddAllowedGroupIDs(ids ...int64) {
if m.allowed_groups == nil {
@@ -15908,7 +17635,7 @@ func (m *UserMutation) ResetField(name string) error {
// AddedEdges returns all edge names that were set/added in this mutation.
func (m *UserMutation) AddedEdges() []string {
- edges := make([]string, 0, 8)
+ edges := make([]string, 0, 9)
if m.api_keys != nil {
edges = append(edges, user.EdgeAPIKeys)
}
@@ -15921,6 +17648,9 @@ func (m *UserMutation) AddedEdges() []string {
if m.assigned_subscriptions != nil {
edges = append(edges, user.EdgeAssignedSubscriptions)
}
+ if m.announcement_reads != nil {
+ edges = append(edges, user.EdgeAnnouncementReads)
+ }
if m.allowed_groups != nil {
edges = append(edges, user.EdgeAllowedGroups)
}
@@ -15964,6 +17694,12 @@ func (m *UserMutation) AddedIDs(name string) []ent.Value {
ids = append(ids, id)
}
return ids
+ case user.EdgeAnnouncementReads:
+ ids := make([]ent.Value, 0, len(m.announcement_reads))
+ for id := range m.announcement_reads {
+ ids = append(ids, id)
+ }
+ return ids
case user.EdgeAllowedGroups:
ids := make([]ent.Value, 0, len(m.allowed_groups))
for id := range m.allowed_groups {
@@ -15994,7 +17730,7 @@ func (m *UserMutation) AddedIDs(name string) []ent.Value {
// RemovedEdges returns all edge names that were removed in this mutation.
func (m *UserMutation) RemovedEdges() []string {
- edges := make([]string, 0, 8)
+ edges := make([]string, 0, 9)
if m.removedapi_keys != nil {
edges = append(edges, user.EdgeAPIKeys)
}
@@ -16007,6 +17743,9 @@ func (m *UserMutation) RemovedEdges() []string {
if m.removedassigned_subscriptions != nil {
edges = append(edges, user.EdgeAssignedSubscriptions)
}
+ if m.removedannouncement_reads != nil {
+ edges = append(edges, user.EdgeAnnouncementReads)
+ }
if m.removedallowed_groups != nil {
edges = append(edges, user.EdgeAllowedGroups)
}
@@ -16050,6 +17789,12 @@ func (m *UserMutation) RemovedIDs(name string) []ent.Value {
ids = append(ids, id)
}
return ids
+ case user.EdgeAnnouncementReads:
+ ids := make([]ent.Value, 0, len(m.removedannouncement_reads))
+ for id := range m.removedannouncement_reads {
+ ids = append(ids, id)
+ }
+ return ids
case user.EdgeAllowedGroups:
ids := make([]ent.Value, 0, len(m.removedallowed_groups))
for id := range m.removedallowed_groups {
@@ -16080,7 +17825,7 @@ func (m *UserMutation) RemovedIDs(name string) []ent.Value {
// ClearedEdges returns all edge names that were cleared in this mutation.
func (m *UserMutation) ClearedEdges() []string {
- edges := make([]string, 0, 8)
+ edges := make([]string, 0, 9)
if m.clearedapi_keys {
edges = append(edges, user.EdgeAPIKeys)
}
@@ -16093,6 +17838,9 @@ func (m *UserMutation) ClearedEdges() []string {
if m.clearedassigned_subscriptions {
edges = append(edges, user.EdgeAssignedSubscriptions)
}
+ if m.clearedannouncement_reads {
+ edges = append(edges, user.EdgeAnnouncementReads)
+ }
if m.clearedallowed_groups {
edges = append(edges, user.EdgeAllowedGroups)
}
@@ -16120,6 +17868,8 @@ func (m *UserMutation) EdgeCleared(name string) bool {
return m.clearedsubscriptions
case user.EdgeAssignedSubscriptions:
return m.clearedassigned_subscriptions
+ case user.EdgeAnnouncementReads:
+ return m.clearedannouncement_reads
case user.EdgeAllowedGroups:
return m.clearedallowed_groups
case user.EdgeUsageLogs:
@@ -16156,6 +17906,9 @@ func (m *UserMutation) ResetEdge(name string) error {
case user.EdgeAssignedSubscriptions:
m.ResetAssignedSubscriptions()
return nil
+ case user.EdgeAnnouncementReads:
+ m.ResetAnnouncementReads()
+ return nil
case user.EdgeAllowedGroups:
m.ResetAllowedGroups()
return nil
diff --git a/backend/ent/predicate/predicate.go b/backend/ent/predicate/predicate.go
index 785cb4e6..613c5913 100644
--- a/backend/ent/predicate/predicate.go
+++ b/backend/ent/predicate/predicate.go
@@ -15,6 +15,12 @@ type Account func(*sql.Selector)
// AccountGroup is the predicate function for accountgroup builders.
type AccountGroup func(*sql.Selector)
+// Announcement is the predicate function for announcement builders.
+type Announcement func(*sql.Selector)
+
+// AnnouncementRead is the predicate function for announcementread builders.
+type AnnouncementRead func(*sql.Selector)
+
// Group is the predicate function for group builders.
type Group func(*sql.Selector)
diff --git a/backend/ent/runtime/runtime.go b/backend/ent/runtime/runtime.go
index 14323f8c..ae4eece8 100644
--- a/backend/ent/runtime/runtime.go
+++ b/backend/ent/runtime/runtime.go
@@ -7,6 +7,8 @@ import (
"github.com/Wei-Shaw/sub2api/ent/account"
"github.com/Wei-Shaw/sub2api/ent/accountgroup"
+ "github.com/Wei-Shaw/sub2api/ent/announcement"
+ "github.com/Wei-Shaw/sub2api/ent/announcementread"
"github.com/Wei-Shaw/sub2api/ent/apikey"
"github.com/Wei-Shaw/sub2api/ent/group"
"github.com/Wei-Shaw/sub2api/ent/promocode"
@@ -210,6 +212,56 @@ func init() {
accountgroupDescCreatedAt := accountgroupFields[3].Descriptor()
// accountgroup.DefaultCreatedAt holds the default value on creation for the created_at field.
accountgroup.DefaultCreatedAt = accountgroupDescCreatedAt.Default.(func() time.Time)
+ announcementFields := schema.Announcement{}.Fields()
+ _ = announcementFields
+ // announcementDescTitle is the schema descriptor for title field.
+ announcementDescTitle := announcementFields[0].Descriptor()
+ // announcement.TitleValidator is a validator for the "title" field. It is called by the builders before save.
+ announcement.TitleValidator = func() func(string) error {
+ validators := announcementDescTitle.Validators
+ fns := [...]func(string) error{
+ validators[0].(func(string) error),
+ validators[1].(func(string) error),
+ }
+ return func(title string) error {
+ for _, fn := range fns {
+ if err := fn(title); err != nil {
+ return err
+ }
+ }
+ return nil
+ }
+ }()
+ // announcementDescContent is the schema descriptor for content field.
+ announcementDescContent := announcementFields[1].Descriptor()
+ // announcement.ContentValidator is a validator for the "content" field. It is called by the builders before save.
+ announcement.ContentValidator = announcementDescContent.Validators[0].(func(string) error)
+ // announcementDescStatus is the schema descriptor for status field.
+ announcementDescStatus := announcementFields[2].Descriptor()
+ // announcement.DefaultStatus holds the default value on creation for the status field.
+ announcement.DefaultStatus = announcementDescStatus.Default.(string)
+ // announcement.StatusValidator is a validator for the "status" field. It is called by the builders before save.
+ announcement.StatusValidator = announcementDescStatus.Validators[0].(func(string) error)
+ // announcementDescCreatedAt is the schema descriptor for created_at field.
+ announcementDescCreatedAt := announcementFields[8].Descriptor()
+ // announcement.DefaultCreatedAt holds the default value on creation for the created_at field.
+ announcement.DefaultCreatedAt = announcementDescCreatedAt.Default.(func() time.Time)
+ // announcementDescUpdatedAt is the schema descriptor for updated_at field.
+ announcementDescUpdatedAt := announcementFields[9].Descriptor()
+ // announcement.DefaultUpdatedAt holds the default value on creation for the updated_at field.
+ announcement.DefaultUpdatedAt = announcementDescUpdatedAt.Default.(func() time.Time)
+ // announcement.UpdateDefaultUpdatedAt holds the default value on update for the updated_at field.
+ announcement.UpdateDefaultUpdatedAt = announcementDescUpdatedAt.UpdateDefault.(func() time.Time)
+ announcementreadFields := schema.AnnouncementRead{}.Fields()
+ _ = announcementreadFields
+ // announcementreadDescReadAt is the schema descriptor for read_at field.
+ announcementreadDescReadAt := announcementreadFields[2].Descriptor()
+ // announcementread.DefaultReadAt holds the default value on creation for the read_at field.
+ announcementread.DefaultReadAt = announcementreadDescReadAt.Default.(func() time.Time)
+ // announcementreadDescCreatedAt is the schema descriptor for created_at field.
+ announcementreadDescCreatedAt := announcementreadFields[3].Descriptor()
+ // announcementread.DefaultCreatedAt holds the default value on creation for the created_at field.
+ announcementread.DefaultCreatedAt = announcementreadDescCreatedAt.Default.(func() time.Time)
groupMixin := schema.Group{}.Mixin()
groupMixinHooks1 := groupMixin[1].Hooks()
group.Hooks[0] = groupMixinHooks1[0]
diff --git a/backend/ent/schema/account.go b/backend/ent/schema/account.go
index dd79ba96..1cfecc2d 100644
--- a/backend/ent/schema/account.go
+++ b/backend/ent/schema/account.go
@@ -4,7 +4,7 @@ package schema
import (
"github.com/Wei-Shaw/sub2api/ent/schema/mixins"
- "github.com/Wei-Shaw/sub2api/internal/service"
+ "github.com/Wei-Shaw/sub2api/internal/domain"
"entgo.io/ent"
"entgo.io/ent/dialect"
@@ -111,7 +111,7 @@ func (Account) Fields() []ent.Field {
// status: 账户状态,如 "active", "error", "disabled"
field.String("status").
MaxLen(20).
- Default(service.StatusActive),
+ Default(domain.StatusActive),
// error_message: 错误信息,记录账户异常时的详细信息
field.String("error_message").
diff --git a/backend/ent/schema/announcement.go b/backend/ent/schema/announcement.go
new file mode 100644
index 00000000..3b534831
--- /dev/null
+++ b/backend/ent/schema/announcement.go
@@ -0,0 +1,91 @@
+package schema
+
+import (
+ "time"
+
+ "github.com/Wei-Shaw/sub2api/internal/domain"
+
+ "entgo.io/ent"
+ "entgo.io/ent/dialect"
+ "entgo.io/ent/dialect/entsql"
+ "entgo.io/ent/schema"
+ "entgo.io/ent/schema/edge"
+ "entgo.io/ent/schema/field"
+ "entgo.io/ent/schema/index"
+)
+
+// Announcement holds the schema definition for the Announcement entity.
+//
+// 删除策略:硬删除(已读记录通过外键级联删除)
+type Announcement struct {
+ ent.Schema
+}
+
+func (Announcement) Annotations() []schema.Annotation {
+ return []schema.Annotation{
+ entsql.Annotation{Table: "announcements"},
+ }
+}
+
+func (Announcement) Fields() []ent.Field {
+ return []ent.Field{
+ field.String("title").
+ MaxLen(200).
+ NotEmpty().
+ Comment("公告标题"),
+ field.String("content").
+ SchemaType(map[string]string{dialect.Postgres: "text"}).
+ NotEmpty().
+ Comment("公告内容(支持 Markdown)"),
+ field.String("status").
+ MaxLen(20).
+ Default(domain.AnnouncementStatusDraft).
+ Comment("状态: draft, active, archived"),
+ field.JSON("targeting", domain.AnnouncementTargeting{}).
+ Optional().
+ SchemaType(map[string]string{dialect.Postgres: "jsonb"}).
+ Comment("展示条件(JSON 规则)"),
+ field.Time("starts_at").
+ Optional().
+ Nillable().
+ SchemaType(map[string]string{dialect.Postgres: "timestamptz"}).
+ Comment("开始展示时间(为空表示立即生效)"),
+ field.Time("ends_at").
+ Optional().
+ Nillable().
+ SchemaType(map[string]string{dialect.Postgres: "timestamptz"}).
+ Comment("结束展示时间(为空表示永久生效)"),
+ field.Int64("created_by").
+ Optional().
+ Nillable().
+ Comment("创建人用户ID(管理员)"),
+ field.Int64("updated_by").
+ Optional().
+ Nillable().
+ Comment("更新人用户ID(管理员)"),
+ field.Time("created_at").
+ Immutable().
+ Default(time.Now).
+ SchemaType(map[string]string{dialect.Postgres: "timestamptz"}),
+ field.Time("updated_at").
+ Default(time.Now).
+ UpdateDefault(time.Now).
+ SchemaType(map[string]string{dialect.Postgres: "timestamptz"}),
+ }
+}
+
+func (Announcement) Edges() []ent.Edge {
+ return []ent.Edge{
+ edge.To("reads", AnnouncementRead.Type),
+ }
+}
+
+func (Announcement) Indexes() []ent.Index {
+ return []ent.Index{
+ index.Fields("status"),
+ index.Fields("created_at"),
+ index.Fields("starts_at"),
+ index.Fields("ends_at"),
+ }
+}
+
diff --git a/backend/ent/schema/announcement_read.go b/backend/ent/schema/announcement_read.go
new file mode 100644
index 00000000..2f80d8b2
--- /dev/null
+++ b/backend/ent/schema/announcement_read.go
@@ -0,0 +1,66 @@
+package schema
+
+import (
+ "time"
+
+ "entgo.io/ent"
+ "entgo.io/ent/dialect"
+ "entgo.io/ent/dialect/entsql"
+ "entgo.io/ent/schema"
+ "entgo.io/ent/schema/edge"
+ "entgo.io/ent/schema/field"
+ "entgo.io/ent/schema/index"
+)
+
+// AnnouncementRead holds the schema definition for the AnnouncementRead entity.
+//
+// 记录用户对公告的已读状态(首次已读时间)。
+type AnnouncementRead struct {
+ ent.Schema
+}
+
+func (AnnouncementRead) Annotations() []schema.Annotation {
+ return []schema.Annotation{
+ entsql.Annotation{Table: "announcement_reads"},
+ }
+}
+
+func (AnnouncementRead) Fields() []ent.Field {
+ return []ent.Field{
+ field.Int64("announcement_id"),
+ field.Int64("user_id"),
+ field.Time("read_at").
+ Default(time.Now).
+ SchemaType(map[string]string{dialect.Postgres: "timestamptz"}).
+ Comment("用户首次已读时间"),
+ field.Time("created_at").
+ Immutable().
+ Default(time.Now).
+ SchemaType(map[string]string{dialect.Postgres: "timestamptz"}),
+ }
+}
+
+func (AnnouncementRead) Edges() []ent.Edge {
+ return []ent.Edge{
+ edge.From("announcement", Announcement.Type).
+ Ref("reads").
+ Field("announcement_id").
+ Unique().
+ Required(),
+ edge.From("user", User.Type).
+ Ref("announcement_reads").
+ Field("user_id").
+ Unique().
+ Required(),
+ }
+}
+
+func (AnnouncementRead) Indexes() []ent.Index {
+ return []ent.Index{
+ index.Fields("announcement_id"),
+ index.Fields("user_id"),
+ index.Fields("read_at"),
+ index.Fields("announcement_id", "user_id").Unique(),
+ }
+}
+
diff --git a/backend/ent/schema/api_key.go b/backend/ent/schema/api_key.go
index 1b206089..1c2d4bd4 100644
--- a/backend/ent/schema/api_key.go
+++ b/backend/ent/schema/api_key.go
@@ -2,7 +2,7 @@ package schema
import (
"github.com/Wei-Shaw/sub2api/ent/schema/mixins"
- "github.com/Wei-Shaw/sub2api/internal/service"
+ "github.com/Wei-Shaw/sub2api/internal/domain"
"entgo.io/ent"
"entgo.io/ent/dialect/entsql"
@@ -45,7 +45,7 @@ func (APIKey) Fields() []ent.Field {
Nillable(),
field.String("status").
MaxLen(20).
- Default(service.StatusActive),
+ Default(domain.StatusActive),
field.JSON("ip_whitelist", []string{}).
Optional().
Comment("Allowed IPs/CIDRs, e.g. [\"192.168.1.100\", \"10.0.0.0/8\"]"),
diff --git a/backend/ent/schema/group.go b/backend/ent/schema/group.go
index 5d0a1e9a..ccd72eac 100644
--- a/backend/ent/schema/group.go
+++ b/backend/ent/schema/group.go
@@ -2,7 +2,7 @@ package schema
import (
"github.com/Wei-Shaw/sub2api/ent/schema/mixins"
- "github.com/Wei-Shaw/sub2api/internal/service"
+ "github.com/Wei-Shaw/sub2api/internal/domain"
"entgo.io/ent"
"entgo.io/ent/dialect"
@@ -49,15 +49,15 @@ func (Group) Fields() []ent.Field {
Default(false),
field.String("status").
MaxLen(20).
- Default(service.StatusActive),
+ Default(domain.StatusActive),
// Subscription-related fields (added by migration 003)
field.String("platform").
MaxLen(50).
- Default(service.PlatformAnthropic),
+ Default(domain.PlatformAnthropic),
field.String("subscription_type").
MaxLen(20).
- Default(service.SubscriptionTypeStandard),
+ Default(domain.SubscriptionTypeStandard),
field.Float("daily_limit_usd").
Optional().
Nillable().
diff --git a/backend/ent/schema/promo_code.go b/backend/ent/schema/promo_code.go
index c3bb824b..3dd08c0e 100644
--- a/backend/ent/schema/promo_code.go
+++ b/backend/ent/schema/promo_code.go
@@ -3,7 +3,7 @@ package schema
import (
"time"
- "github.com/Wei-Shaw/sub2api/internal/service"
+ "github.com/Wei-Shaw/sub2api/internal/domain"
"entgo.io/ent"
"entgo.io/ent/dialect"
@@ -49,7 +49,7 @@ func (PromoCode) Fields() []ent.Field {
Comment("已使用次数"),
field.String("status").
MaxLen(20).
- Default(service.PromoCodeStatusActive).
+ Default(domain.PromoCodeStatusActive).
Comment("状态: active, disabled"),
field.Time("expires_at").
Optional().
diff --git a/backend/ent/schema/redeem_code.go b/backend/ent/schema/redeem_code.go
index b4664e06..6fb86148 100644
--- a/backend/ent/schema/redeem_code.go
+++ b/backend/ent/schema/redeem_code.go
@@ -3,7 +3,7 @@ package schema
import (
"time"
- "github.com/Wei-Shaw/sub2api/internal/service"
+ "github.com/Wei-Shaw/sub2api/internal/domain"
"entgo.io/ent"
"entgo.io/ent/dialect"
@@ -41,13 +41,13 @@ func (RedeemCode) Fields() []ent.Field {
Unique(),
field.String("type").
MaxLen(20).
- Default(service.RedeemTypeBalance),
+ Default(domain.RedeemTypeBalance),
field.Float("value").
SchemaType(map[string]string{dialect.Postgres: "decimal(20,8)"}).
Default(0),
field.String("status").
MaxLen(20).
- Default(service.StatusUnused),
+ Default(domain.StatusUnused),
field.Int64("used_by").
Optional().
Nillable(),
diff --git a/backend/ent/schema/user.go b/backend/ent/schema/user.go
index 335c1cc8..d443ef45 100644
--- a/backend/ent/schema/user.go
+++ b/backend/ent/schema/user.go
@@ -2,7 +2,7 @@ package schema
import (
"github.com/Wei-Shaw/sub2api/ent/schema/mixins"
- "github.com/Wei-Shaw/sub2api/internal/service"
+ "github.com/Wei-Shaw/sub2api/internal/domain"
"entgo.io/ent"
"entgo.io/ent/dialect"
@@ -43,7 +43,7 @@ func (User) Fields() []ent.Field {
NotEmpty(),
field.String("role").
MaxLen(20).
- Default(service.RoleUser),
+ Default(domain.RoleUser),
field.Float("balance").
SchemaType(map[string]string{dialect.Postgres: "decimal(20,8)"}).
Default(0),
@@ -51,7 +51,7 @@ func (User) Fields() []ent.Field {
Default(5),
field.String("status").
MaxLen(20).
- Default(service.StatusActive),
+ Default(domain.StatusActive),
// Optional profile fields (added later; default '' in DB migration)
field.String("username").
@@ -81,6 +81,7 @@ func (User) Edges() []ent.Edge {
edge.To("redeem_codes", RedeemCode.Type),
edge.To("subscriptions", UserSubscription.Type),
edge.To("assigned_subscriptions", UserSubscription.Type),
+ edge.To("announcement_reads", AnnouncementRead.Type),
edge.To("allowed_groups", Group.Type).
Through("user_allowed_groups", UserAllowedGroup.Type),
edge.To("usage_logs", UsageLog.Type),
diff --git a/backend/ent/schema/user_subscription.go b/backend/ent/schema/user_subscription.go
index b21f4083..fa13612b 100644
--- a/backend/ent/schema/user_subscription.go
+++ b/backend/ent/schema/user_subscription.go
@@ -4,7 +4,7 @@ import (
"time"
"github.com/Wei-Shaw/sub2api/ent/schema/mixins"
- "github.com/Wei-Shaw/sub2api/internal/service"
+ "github.com/Wei-Shaw/sub2api/internal/domain"
"entgo.io/ent"
"entgo.io/ent/dialect"
@@ -44,7 +44,7 @@ func (UserSubscription) Fields() []ent.Field {
SchemaType(map[string]string{dialect.Postgres: "timestamptz"}),
field.String("status").
MaxLen(20).
- Default(service.SubscriptionStatusActive),
+ Default(domain.SubscriptionStatusActive),
field.Time("daily_window_start").
Optional().
diff --git a/backend/ent/tx.go b/backend/ent/tx.go
index 7ff16ec8..702bdf90 100644
--- a/backend/ent/tx.go
+++ b/backend/ent/tx.go
@@ -20,6 +20,10 @@ type Tx struct {
Account *AccountClient
// AccountGroup is the client for interacting with the AccountGroup builders.
AccountGroup *AccountGroupClient
+ // Announcement is the client for interacting with the Announcement builders.
+ Announcement *AnnouncementClient
+ // AnnouncementRead is the client for interacting with the AnnouncementRead builders.
+ AnnouncementRead *AnnouncementReadClient
// Group is the client for interacting with the Group builders.
Group *GroupClient
// PromoCode is the client for interacting with the PromoCode builders.
@@ -180,6 +184,8 @@ func (tx *Tx) init() {
tx.APIKey = NewAPIKeyClient(tx.config)
tx.Account = NewAccountClient(tx.config)
tx.AccountGroup = NewAccountGroupClient(tx.config)
+ tx.Announcement = NewAnnouncementClient(tx.config)
+ tx.AnnouncementRead = NewAnnouncementReadClient(tx.config)
tx.Group = NewGroupClient(tx.config)
tx.PromoCode = NewPromoCodeClient(tx.config)
tx.PromoCodeUsage = NewPromoCodeUsageClient(tx.config)
diff --git a/backend/ent/user.go b/backend/ent/user.go
index 82830a95..2435aa1b 100644
--- a/backend/ent/user.go
+++ b/backend/ent/user.go
@@ -61,6 +61,8 @@ type UserEdges struct {
Subscriptions []*UserSubscription `json:"subscriptions,omitempty"`
// AssignedSubscriptions holds the value of the assigned_subscriptions edge.
AssignedSubscriptions []*UserSubscription `json:"assigned_subscriptions,omitempty"`
+ // AnnouncementReads holds the value of the announcement_reads edge.
+ AnnouncementReads []*AnnouncementRead `json:"announcement_reads,omitempty"`
// AllowedGroups holds the value of the allowed_groups edge.
AllowedGroups []*Group `json:"allowed_groups,omitempty"`
// UsageLogs holds the value of the usage_logs edge.
@@ -73,7 +75,7 @@ type UserEdges struct {
UserAllowedGroups []*UserAllowedGroup `json:"user_allowed_groups,omitempty"`
// loadedTypes holds the information for reporting if a
// type was loaded (or requested) in eager-loading or not.
- loadedTypes [9]bool
+ loadedTypes [10]bool
}
// APIKeysOrErr returns the APIKeys value or an error if the edge
@@ -112,10 +114,19 @@ func (e UserEdges) AssignedSubscriptionsOrErr() ([]*UserSubscription, error) {
return nil, &NotLoadedError{edge: "assigned_subscriptions"}
}
+// AnnouncementReadsOrErr returns the AnnouncementReads value or an error if the edge
+// was not loaded in eager-loading.
+func (e UserEdges) AnnouncementReadsOrErr() ([]*AnnouncementRead, error) {
+ if e.loadedTypes[4] {
+ return e.AnnouncementReads, nil
+ }
+ return nil, &NotLoadedError{edge: "announcement_reads"}
+}
+
// AllowedGroupsOrErr returns the AllowedGroups value or an error if the edge
// was not loaded in eager-loading.
func (e UserEdges) AllowedGroupsOrErr() ([]*Group, error) {
- if e.loadedTypes[4] {
+ if e.loadedTypes[5] {
return e.AllowedGroups, nil
}
return nil, &NotLoadedError{edge: "allowed_groups"}
@@ -124,7 +135,7 @@ func (e UserEdges) AllowedGroupsOrErr() ([]*Group, error) {
// UsageLogsOrErr returns the UsageLogs value or an error if the edge
// was not loaded in eager-loading.
func (e UserEdges) UsageLogsOrErr() ([]*UsageLog, error) {
- if e.loadedTypes[5] {
+ if e.loadedTypes[6] {
return e.UsageLogs, nil
}
return nil, &NotLoadedError{edge: "usage_logs"}
@@ -133,7 +144,7 @@ func (e UserEdges) UsageLogsOrErr() ([]*UsageLog, error) {
// AttributeValuesOrErr returns the AttributeValues value or an error if the edge
// was not loaded in eager-loading.
func (e UserEdges) AttributeValuesOrErr() ([]*UserAttributeValue, error) {
- if e.loadedTypes[6] {
+ if e.loadedTypes[7] {
return e.AttributeValues, nil
}
return nil, &NotLoadedError{edge: "attribute_values"}
@@ -142,7 +153,7 @@ func (e UserEdges) AttributeValuesOrErr() ([]*UserAttributeValue, error) {
// PromoCodeUsagesOrErr returns the PromoCodeUsages value or an error if the edge
// was not loaded in eager-loading.
func (e UserEdges) PromoCodeUsagesOrErr() ([]*PromoCodeUsage, error) {
- if e.loadedTypes[7] {
+ if e.loadedTypes[8] {
return e.PromoCodeUsages, nil
}
return nil, &NotLoadedError{edge: "promo_code_usages"}
@@ -151,7 +162,7 @@ func (e UserEdges) PromoCodeUsagesOrErr() ([]*PromoCodeUsage, error) {
// UserAllowedGroupsOrErr returns the UserAllowedGroups value or an error if the edge
// was not loaded in eager-loading.
func (e UserEdges) UserAllowedGroupsOrErr() ([]*UserAllowedGroup, error) {
- if e.loadedTypes[8] {
+ if e.loadedTypes[9] {
return e.UserAllowedGroups, nil
}
return nil, &NotLoadedError{edge: "user_allowed_groups"}
@@ -313,6 +324,11 @@ func (_m *User) QueryAssignedSubscriptions() *UserSubscriptionQuery {
return NewUserClient(_m.config).QueryAssignedSubscriptions(_m)
}
+// QueryAnnouncementReads queries the "announcement_reads" edge of the User entity.
+func (_m *User) QueryAnnouncementReads() *AnnouncementReadQuery {
+ return NewUserClient(_m.config).QueryAnnouncementReads(_m)
+}
+
// QueryAllowedGroups queries the "allowed_groups" edge of the User entity.
func (_m *User) QueryAllowedGroups() *GroupQuery {
return NewUserClient(_m.config).QueryAllowedGroups(_m)
diff --git a/backend/ent/user/user.go b/backend/ent/user/user.go
index 0685ed72..ae9418ff 100644
--- a/backend/ent/user/user.go
+++ b/backend/ent/user/user.go
@@ -51,6 +51,8 @@ const (
EdgeSubscriptions = "subscriptions"
// EdgeAssignedSubscriptions holds the string denoting the assigned_subscriptions edge name in mutations.
EdgeAssignedSubscriptions = "assigned_subscriptions"
+ // EdgeAnnouncementReads holds the string denoting the announcement_reads edge name in mutations.
+ EdgeAnnouncementReads = "announcement_reads"
// EdgeAllowedGroups holds the string denoting the allowed_groups edge name in mutations.
EdgeAllowedGroups = "allowed_groups"
// EdgeUsageLogs holds the string denoting the usage_logs edge name in mutations.
@@ -91,6 +93,13 @@ const (
AssignedSubscriptionsInverseTable = "user_subscriptions"
// AssignedSubscriptionsColumn is the table column denoting the assigned_subscriptions relation/edge.
AssignedSubscriptionsColumn = "assigned_by"
+ // AnnouncementReadsTable is the table that holds the announcement_reads relation/edge.
+ AnnouncementReadsTable = "announcement_reads"
+ // AnnouncementReadsInverseTable is the table name for the AnnouncementRead entity.
+ // It exists in this package in order to avoid circular dependency with the "announcementread" package.
+ AnnouncementReadsInverseTable = "announcement_reads"
+ // AnnouncementReadsColumn is the table column denoting the announcement_reads relation/edge.
+ AnnouncementReadsColumn = "user_id"
// AllowedGroupsTable is the table that holds the allowed_groups relation/edge. The primary key declared below.
AllowedGroupsTable = "user_allowed_groups"
// AllowedGroupsInverseTable is the table name for the Group entity.
@@ -335,6 +344,20 @@ func ByAssignedSubscriptions(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOp
}
}
+// ByAnnouncementReadsCount orders the results by announcement_reads count.
+func ByAnnouncementReadsCount(opts ...sql.OrderTermOption) OrderOption {
+ return func(s *sql.Selector) {
+ sqlgraph.OrderByNeighborsCount(s, newAnnouncementReadsStep(), opts...)
+ }
+}
+
+// ByAnnouncementReads orders the results by announcement_reads terms.
+func ByAnnouncementReads(term sql.OrderTerm, terms ...sql.OrderTerm) OrderOption {
+ return func(s *sql.Selector) {
+ sqlgraph.OrderByNeighborTerms(s, newAnnouncementReadsStep(), append([]sql.OrderTerm{term}, terms...)...)
+ }
+}
+
// ByAllowedGroupsCount orders the results by allowed_groups count.
func ByAllowedGroupsCount(opts ...sql.OrderTermOption) OrderOption {
return func(s *sql.Selector) {
@@ -432,6 +455,13 @@ func newAssignedSubscriptionsStep() *sqlgraph.Step {
sqlgraph.Edge(sqlgraph.O2M, false, AssignedSubscriptionsTable, AssignedSubscriptionsColumn),
)
}
+func newAnnouncementReadsStep() *sqlgraph.Step {
+ return sqlgraph.NewStep(
+ sqlgraph.From(Table, FieldID),
+ sqlgraph.To(AnnouncementReadsInverseTable, FieldID),
+ sqlgraph.Edge(sqlgraph.O2M, false, AnnouncementReadsTable, AnnouncementReadsColumn),
+ )
+}
func newAllowedGroupsStep() *sqlgraph.Step {
return sqlgraph.NewStep(
sqlgraph.From(Table, FieldID),
diff --git a/backend/ent/user/where.go b/backend/ent/user/where.go
index 3dc4fec7..1de61037 100644
--- a/backend/ent/user/where.go
+++ b/backend/ent/user/where.go
@@ -952,6 +952,29 @@ func HasAssignedSubscriptionsWith(preds ...predicate.UserSubscription) predicate
})
}
+// HasAnnouncementReads applies the HasEdge predicate on the "announcement_reads" edge.
+func HasAnnouncementReads() predicate.User {
+ return predicate.User(func(s *sql.Selector) {
+ step := sqlgraph.NewStep(
+ sqlgraph.From(Table, FieldID),
+ sqlgraph.Edge(sqlgraph.O2M, false, AnnouncementReadsTable, AnnouncementReadsColumn),
+ )
+ sqlgraph.HasNeighbors(s, step)
+ })
+}
+
+// HasAnnouncementReadsWith applies the HasEdge predicate on the "announcement_reads" edge with a given conditions (other predicates).
+func HasAnnouncementReadsWith(preds ...predicate.AnnouncementRead) predicate.User {
+ return predicate.User(func(s *sql.Selector) {
+ step := newAnnouncementReadsStep()
+ sqlgraph.HasNeighborsWith(s, step, func(s *sql.Selector) {
+ for _, p := range preds {
+ p(s)
+ }
+ })
+ })
+}
+
// HasAllowedGroups applies the HasEdge predicate on the "allowed_groups" edge.
func HasAllowedGroups() predicate.User {
return predicate.User(func(s *sql.Selector) {
diff --git a/backend/ent/user_create.go b/backend/ent/user_create.go
index 6b4ebc59..f862a580 100644
--- a/backend/ent/user_create.go
+++ b/backend/ent/user_create.go
@@ -11,6 +11,7 @@ import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
+ "github.com/Wei-Shaw/sub2api/ent/announcementread"
"github.com/Wei-Shaw/sub2api/ent/apikey"
"github.com/Wei-Shaw/sub2api/ent/group"
"github.com/Wei-Shaw/sub2api/ent/promocodeusage"
@@ -269,6 +270,21 @@ func (_c *UserCreate) AddAssignedSubscriptions(v ...*UserSubscription) *UserCrea
return _c.AddAssignedSubscriptionIDs(ids...)
}
+// AddAnnouncementReadIDs adds the "announcement_reads" edge to the AnnouncementRead entity by IDs.
+func (_c *UserCreate) AddAnnouncementReadIDs(ids ...int64) *UserCreate {
+ _c.mutation.AddAnnouncementReadIDs(ids...)
+ return _c
+}
+
+// AddAnnouncementReads adds the "announcement_reads" edges to the AnnouncementRead entity.
+func (_c *UserCreate) AddAnnouncementReads(v ...*AnnouncementRead) *UserCreate {
+ ids := make([]int64, len(v))
+ for i := range v {
+ ids[i] = v[i].ID
+ }
+ return _c.AddAnnouncementReadIDs(ids...)
+}
+
// AddAllowedGroupIDs adds the "allowed_groups" edge to the Group entity by IDs.
func (_c *UserCreate) AddAllowedGroupIDs(ids ...int64) *UserCreate {
_c.mutation.AddAllowedGroupIDs(ids...)
@@ -618,6 +634,22 @@ func (_c *UserCreate) createSpec() (*User, *sqlgraph.CreateSpec) {
}
_spec.Edges = append(_spec.Edges, edge)
}
+ if nodes := _c.mutation.AnnouncementReadsIDs(); len(nodes) > 0 {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.O2M,
+ Inverse: false,
+ Table: user.AnnouncementReadsTable,
+ Columns: []string{user.AnnouncementReadsColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64),
+ },
+ }
+ for _, k := range nodes {
+ edge.Target.Nodes = append(edge.Target.Nodes, k)
+ }
+ _spec.Edges = append(_spec.Edges, edge)
+ }
if nodes := _c.mutation.AllowedGroupsIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2M,
diff --git a/backend/ent/user_query.go b/backend/ent/user_query.go
index e66e2dc8..4b56e16f 100644
--- a/backend/ent/user_query.go
+++ b/backend/ent/user_query.go
@@ -13,6 +13,7 @@ import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
+ "github.com/Wei-Shaw/sub2api/ent/announcementread"
"github.com/Wei-Shaw/sub2api/ent/apikey"
"github.com/Wei-Shaw/sub2api/ent/group"
"github.com/Wei-Shaw/sub2api/ent/predicate"
@@ -36,6 +37,7 @@ type UserQuery struct {
withRedeemCodes *RedeemCodeQuery
withSubscriptions *UserSubscriptionQuery
withAssignedSubscriptions *UserSubscriptionQuery
+ withAnnouncementReads *AnnouncementReadQuery
withAllowedGroups *GroupQuery
withUsageLogs *UsageLogQuery
withAttributeValues *UserAttributeValueQuery
@@ -166,6 +168,28 @@ func (_q *UserQuery) QueryAssignedSubscriptions() *UserSubscriptionQuery {
return query
}
+// QueryAnnouncementReads chains the current query on the "announcement_reads" edge.
+func (_q *UserQuery) QueryAnnouncementReads() *AnnouncementReadQuery {
+ query := (&AnnouncementReadClient{config: _q.config}).Query()
+ query.path = func(ctx context.Context) (fromU *sql.Selector, err error) {
+ if err := _q.prepareQuery(ctx); err != nil {
+ return nil, err
+ }
+ selector := _q.sqlQuery(ctx)
+ if err := selector.Err(); err != nil {
+ return nil, err
+ }
+ step := sqlgraph.NewStep(
+ sqlgraph.From(user.Table, user.FieldID, selector),
+ sqlgraph.To(announcementread.Table, announcementread.FieldID),
+ sqlgraph.Edge(sqlgraph.O2M, false, user.AnnouncementReadsTable, user.AnnouncementReadsColumn),
+ )
+ fromU = sqlgraph.SetNeighbors(_q.driver.Dialect(), step)
+ return fromU, nil
+ }
+ return query
+}
+
// QueryAllowedGroups chains the current query on the "allowed_groups" edge.
func (_q *UserQuery) QueryAllowedGroups() *GroupQuery {
query := (&GroupClient{config: _q.config}).Query()
@@ -472,6 +496,7 @@ func (_q *UserQuery) Clone() *UserQuery {
withRedeemCodes: _q.withRedeemCodes.Clone(),
withSubscriptions: _q.withSubscriptions.Clone(),
withAssignedSubscriptions: _q.withAssignedSubscriptions.Clone(),
+ withAnnouncementReads: _q.withAnnouncementReads.Clone(),
withAllowedGroups: _q.withAllowedGroups.Clone(),
withUsageLogs: _q.withUsageLogs.Clone(),
withAttributeValues: _q.withAttributeValues.Clone(),
@@ -527,6 +552,17 @@ func (_q *UserQuery) WithAssignedSubscriptions(opts ...func(*UserSubscriptionQue
return _q
}
+// WithAnnouncementReads tells the query-builder to eager-load the nodes that are connected to
+// the "announcement_reads" edge. The optional arguments are used to configure the query builder of the edge.
+func (_q *UserQuery) WithAnnouncementReads(opts ...func(*AnnouncementReadQuery)) *UserQuery {
+ query := (&AnnouncementReadClient{config: _q.config}).Query()
+ for _, opt := range opts {
+ opt(query)
+ }
+ _q.withAnnouncementReads = query
+ return _q
+}
+
// WithAllowedGroups tells the query-builder to eager-load the nodes that are connected to
// the "allowed_groups" edge. The optional arguments are used to configure the query builder of the edge.
func (_q *UserQuery) WithAllowedGroups(opts ...func(*GroupQuery)) *UserQuery {
@@ -660,11 +696,12 @@ func (_q *UserQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*User, e
var (
nodes = []*User{}
_spec = _q.querySpec()
- loadedTypes = [9]bool{
+ loadedTypes = [10]bool{
_q.withAPIKeys != nil,
_q.withRedeemCodes != nil,
_q.withSubscriptions != nil,
_q.withAssignedSubscriptions != nil,
+ _q.withAnnouncementReads != nil,
_q.withAllowedGroups != nil,
_q.withUsageLogs != nil,
_q.withAttributeValues != nil,
@@ -723,6 +760,13 @@ func (_q *UserQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*User, e
return nil, err
}
}
+ if query := _q.withAnnouncementReads; query != nil {
+ if err := _q.loadAnnouncementReads(ctx, query, nodes,
+ func(n *User) { n.Edges.AnnouncementReads = []*AnnouncementRead{} },
+ func(n *User, e *AnnouncementRead) { n.Edges.AnnouncementReads = append(n.Edges.AnnouncementReads, e) }); err != nil {
+ return nil, err
+ }
+ }
if query := _q.withAllowedGroups; query != nil {
if err := _q.loadAllowedGroups(ctx, query, nodes,
func(n *User) { n.Edges.AllowedGroups = []*Group{} },
@@ -887,6 +931,36 @@ func (_q *UserQuery) loadAssignedSubscriptions(ctx context.Context, query *UserS
}
return nil
}
+func (_q *UserQuery) loadAnnouncementReads(ctx context.Context, query *AnnouncementReadQuery, nodes []*User, init func(*User), assign func(*User, *AnnouncementRead)) error {
+ fks := make([]driver.Value, 0, len(nodes))
+ nodeids := make(map[int64]*User)
+ for i := range nodes {
+ fks = append(fks, nodes[i].ID)
+ nodeids[nodes[i].ID] = nodes[i]
+ if init != nil {
+ init(nodes[i])
+ }
+ }
+ if len(query.ctx.Fields) > 0 {
+ query.ctx.AppendFieldOnce(announcementread.FieldUserID)
+ }
+ query.Where(predicate.AnnouncementRead(func(s *sql.Selector) {
+ s.Where(sql.InValues(s.C(user.AnnouncementReadsColumn), fks...))
+ }))
+ neighbors, err := query.All(ctx)
+ if err != nil {
+ return err
+ }
+ for _, n := range neighbors {
+ fk := n.UserID
+ node, ok := nodeids[fk]
+ if !ok {
+ return fmt.Errorf(`unexpected referenced foreign-key "user_id" returned %v for node %v`, fk, n.ID)
+ }
+ assign(node, n)
+ }
+ return nil
+}
func (_q *UserQuery) loadAllowedGroups(ctx context.Context, query *GroupQuery, nodes []*User, init func(*User), assign func(*User, *Group)) error {
edgeIDs := make([]driver.Value, len(nodes))
byID := make(map[int64]*User)
diff --git a/backend/ent/user_update.go b/backend/ent/user_update.go
index b98a41c6..80222c92 100644
--- a/backend/ent/user_update.go
+++ b/backend/ent/user_update.go
@@ -11,6 +11,7 @@ import (
"entgo.io/ent/dialect/sql"
"entgo.io/ent/dialect/sql/sqlgraph"
"entgo.io/ent/schema/field"
+ "github.com/Wei-Shaw/sub2api/ent/announcementread"
"github.com/Wei-Shaw/sub2api/ent/apikey"
"github.com/Wei-Shaw/sub2api/ent/group"
"github.com/Wei-Shaw/sub2api/ent/predicate"
@@ -301,6 +302,21 @@ func (_u *UserUpdate) AddAssignedSubscriptions(v ...*UserSubscription) *UserUpda
return _u.AddAssignedSubscriptionIDs(ids...)
}
+// AddAnnouncementReadIDs adds the "announcement_reads" edge to the AnnouncementRead entity by IDs.
+func (_u *UserUpdate) AddAnnouncementReadIDs(ids ...int64) *UserUpdate {
+ _u.mutation.AddAnnouncementReadIDs(ids...)
+ return _u
+}
+
+// AddAnnouncementReads adds the "announcement_reads" edges to the AnnouncementRead entity.
+func (_u *UserUpdate) AddAnnouncementReads(v ...*AnnouncementRead) *UserUpdate {
+ ids := make([]int64, len(v))
+ for i := range v {
+ ids[i] = v[i].ID
+ }
+ return _u.AddAnnouncementReadIDs(ids...)
+}
+
// AddAllowedGroupIDs adds the "allowed_groups" edge to the Group entity by IDs.
func (_u *UserUpdate) AddAllowedGroupIDs(ids ...int64) *UserUpdate {
_u.mutation.AddAllowedGroupIDs(ids...)
@@ -450,6 +466,27 @@ func (_u *UserUpdate) RemoveAssignedSubscriptions(v ...*UserSubscription) *UserU
return _u.RemoveAssignedSubscriptionIDs(ids...)
}
+// ClearAnnouncementReads clears all "announcement_reads" edges to the AnnouncementRead entity.
+func (_u *UserUpdate) ClearAnnouncementReads() *UserUpdate {
+ _u.mutation.ClearAnnouncementReads()
+ return _u
+}
+
+// RemoveAnnouncementReadIDs removes the "announcement_reads" edge to AnnouncementRead entities by IDs.
+func (_u *UserUpdate) RemoveAnnouncementReadIDs(ids ...int64) *UserUpdate {
+ _u.mutation.RemoveAnnouncementReadIDs(ids...)
+ return _u
+}
+
+// RemoveAnnouncementReads removes "announcement_reads" edges to AnnouncementRead entities.
+func (_u *UserUpdate) RemoveAnnouncementReads(v ...*AnnouncementRead) *UserUpdate {
+ ids := make([]int64, len(v))
+ for i := range v {
+ ids[i] = v[i].ID
+ }
+ return _u.RemoveAnnouncementReadIDs(ids...)
+}
+
// ClearAllowedGroups clears all "allowed_groups" edges to the Group entity.
func (_u *UserUpdate) ClearAllowedGroups() *UserUpdate {
_u.mutation.ClearAllowedGroups()
@@ -852,6 +889,51 @@ func (_u *UserUpdate) sqlSave(ctx context.Context) (_node int, err error) {
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
+ if _u.mutation.AnnouncementReadsCleared() {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.O2M,
+ Inverse: false,
+ Table: user.AnnouncementReadsTable,
+ Columns: []string{user.AnnouncementReadsColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64),
+ },
+ }
+ _spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+ }
+ if nodes := _u.mutation.RemovedAnnouncementReadsIDs(); len(nodes) > 0 && !_u.mutation.AnnouncementReadsCleared() {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.O2M,
+ Inverse: false,
+ Table: user.AnnouncementReadsTable,
+ Columns: []string{user.AnnouncementReadsColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64),
+ },
+ }
+ for _, k := range nodes {
+ edge.Target.Nodes = append(edge.Target.Nodes, k)
+ }
+ _spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+ }
+ if nodes := _u.mutation.AnnouncementReadsIDs(); len(nodes) > 0 {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.O2M,
+ Inverse: false,
+ Table: user.AnnouncementReadsTable,
+ Columns: []string{user.AnnouncementReadsColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64),
+ },
+ }
+ for _, k := range nodes {
+ edge.Target.Nodes = append(edge.Target.Nodes, k)
+ }
+ _spec.Edges.Add = append(_spec.Edges.Add, edge)
+ }
if _u.mutation.AllowedGroupsCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2M,
@@ -1330,6 +1412,21 @@ func (_u *UserUpdateOne) AddAssignedSubscriptions(v ...*UserSubscription) *UserU
return _u.AddAssignedSubscriptionIDs(ids...)
}
+// AddAnnouncementReadIDs adds the "announcement_reads" edge to the AnnouncementRead entity by IDs.
+func (_u *UserUpdateOne) AddAnnouncementReadIDs(ids ...int64) *UserUpdateOne {
+ _u.mutation.AddAnnouncementReadIDs(ids...)
+ return _u
+}
+
+// AddAnnouncementReads adds the "announcement_reads" edges to the AnnouncementRead entity.
+func (_u *UserUpdateOne) AddAnnouncementReads(v ...*AnnouncementRead) *UserUpdateOne {
+ ids := make([]int64, len(v))
+ for i := range v {
+ ids[i] = v[i].ID
+ }
+ return _u.AddAnnouncementReadIDs(ids...)
+}
+
// AddAllowedGroupIDs adds the "allowed_groups" edge to the Group entity by IDs.
func (_u *UserUpdateOne) AddAllowedGroupIDs(ids ...int64) *UserUpdateOne {
_u.mutation.AddAllowedGroupIDs(ids...)
@@ -1479,6 +1576,27 @@ func (_u *UserUpdateOne) RemoveAssignedSubscriptions(v ...*UserSubscription) *Us
return _u.RemoveAssignedSubscriptionIDs(ids...)
}
+// ClearAnnouncementReads clears all "announcement_reads" edges to the AnnouncementRead entity.
+func (_u *UserUpdateOne) ClearAnnouncementReads() *UserUpdateOne {
+ _u.mutation.ClearAnnouncementReads()
+ return _u
+}
+
+// RemoveAnnouncementReadIDs removes the "announcement_reads" edge to AnnouncementRead entities by IDs.
+func (_u *UserUpdateOne) RemoveAnnouncementReadIDs(ids ...int64) *UserUpdateOne {
+ _u.mutation.RemoveAnnouncementReadIDs(ids...)
+ return _u
+}
+
+// RemoveAnnouncementReads removes "announcement_reads" edges to AnnouncementRead entities.
+func (_u *UserUpdateOne) RemoveAnnouncementReads(v ...*AnnouncementRead) *UserUpdateOne {
+ ids := make([]int64, len(v))
+ for i := range v {
+ ids[i] = v[i].ID
+ }
+ return _u.RemoveAnnouncementReadIDs(ids...)
+}
+
// ClearAllowedGroups clears all "allowed_groups" edges to the Group entity.
func (_u *UserUpdateOne) ClearAllowedGroups() *UserUpdateOne {
_u.mutation.ClearAllowedGroups()
@@ -1911,6 +2029,51 @@ func (_u *UserUpdateOne) sqlSave(ctx context.Context) (_node *User, err error) {
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
+ if _u.mutation.AnnouncementReadsCleared() {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.O2M,
+ Inverse: false,
+ Table: user.AnnouncementReadsTable,
+ Columns: []string{user.AnnouncementReadsColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64),
+ },
+ }
+ _spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+ }
+ if nodes := _u.mutation.RemovedAnnouncementReadsIDs(); len(nodes) > 0 && !_u.mutation.AnnouncementReadsCleared() {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.O2M,
+ Inverse: false,
+ Table: user.AnnouncementReadsTable,
+ Columns: []string{user.AnnouncementReadsColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64),
+ },
+ }
+ for _, k := range nodes {
+ edge.Target.Nodes = append(edge.Target.Nodes, k)
+ }
+ _spec.Edges.Clear = append(_spec.Edges.Clear, edge)
+ }
+ if nodes := _u.mutation.AnnouncementReadsIDs(); len(nodes) > 0 {
+ edge := &sqlgraph.EdgeSpec{
+ Rel: sqlgraph.O2M,
+ Inverse: false,
+ Table: user.AnnouncementReadsTable,
+ Columns: []string{user.AnnouncementReadsColumn},
+ Bidi: false,
+ Target: &sqlgraph.EdgeTarget{
+ IDSpec: sqlgraph.NewFieldSpec(announcementread.FieldID, field.TypeInt64),
+ },
+ }
+ for _, k := range nodes {
+ edge.Target.Nodes = append(edge.Target.Nodes, k)
+ }
+ _spec.Edges.Add = append(_spec.Edges.Add, edge)
+ }
if _u.mutation.AllowedGroupsCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2M,
diff --git a/backend/internal/domain/announcement.go b/backend/internal/domain/announcement.go
new file mode 100644
index 00000000..7dc9a9cc
--- /dev/null
+++ b/backend/internal/domain/announcement.go
@@ -0,0 +1,226 @@
+package domain
+
+import (
+ "strings"
+ "time"
+
+ infraerrors "github.com/Wei-Shaw/sub2api/internal/pkg/errors"
+)
+
+const (
+ AnnouncementStatusDraft = "draft"
+ AnnouncementStatusActive = "active"
+ AnnouncementStatusArchived = "archived"
+)
+
+const (
+ AnnouncementConditionTypeSubscription = "subscription"
+ AnnouncementConditionTypeBalance = "balance"
+)
+
+const (
+ AnnouncementOperatorIn = "in"
+ AnnouncementOperatorGT = "gt"
+ AnnouncementOperatorGTE = "gte"
+ AnnouncementOperatorLT = "lt"
+ AnnouncementOperatorLTE = "lte"
+ AnnouncementOperatorEQ = "eq"
+)
+
+var (
+ ErrAnnouncementNotFound = infraerrors.NotFound("ANNOUNCEMENT_NOT_FOUND", "announcement not found")
+ ErrAnnouncementInvalidTarget = infraerrors.BadRequest("ANNOUNCEMENT_INVALID_TARGET", "invalid announcement targeting rules")
+)
+
+type AnnouncementTargeting struct {
+ // AnyOf 表示 OR:任意一个条件组满足即可展示。
+ AnyOf []AnnouncementConditionGroup `json:"any_of,omitempty"`
+}
+
+type AnnouncementConditionGroup struct {
+ // AllOf 表示 AND:组内所有条件都满足才算命中该组。
+ AllOf []AnnouncementCondition `json:"all_of,omitempty"`
+}
+
+type AnnouncementCondition struct {
+ // Type: subscription | balance
+ Type string `json:"type"`
+
+ // Operator:
+ // - subscription: in
+ // - balance: gt/gte/lt/lte/eq
+ Operator string `json:"operator"`
+
+ // subscription 条件:匹配的订阅套餐(group_id)
+ GroupIDs []int64 `json:"group_ids,omitempty"`
+
+ // balance 条件:比较阈值
+ Value float64 `json:"value,omitempty"`
+}
+
+func (t AnnouncementTargeting) Matches(balance float64, activeSubscriptionGroupIDs map[int64]struct{}) bool {
+ // 空规则:展示给所有用户
+ if len(t.AnyOf) == 0 {
+ return true
+ }
+
+ for _, group := range t.AnyOf {
+ if len(group.AllOf) == 0 {
+ // 空条件组不命中(避免 OR 中出现无条件 “全命中”)
+ continue
+ }
+ allMatched := true
+ for _, cond := range group.AllOf {
+ if !cond.Matches(balance, activeSubscriptionGroupIDs) {
+ allMatched = false
+ break
+ }
+ }
+ if allMatched {
+ return true
+ }
+ }
+
+ return false
+}
+
+func (c AnnouncementCondition) Matches(balance float64, activeSubscriptionGroupIDs map[int64]struct{}) bool {
+ switch c.Type {
+ case AnnouncementConditionTypeSubscription:
+ if c.Operator != AnnouncementOperatorIn {
+ return false
+ }
+ if len(c.GroupIDs) == 0 {
+ return false
+ }
+ if len(activeSubscriptionGroupIDs) == 0 {
+ return false
+ }
+ for _, gid := range c.GroupIDs {
+ if _, ok := activeSubscriptionGroupIDs[gid]; ok {
+ return true
+ }
+ }
+ return false
+
+ case AnnouncementConditionTypeBalance:
+ switch c.Operator {
+ case AnnouncementOperatorGT:
+ return balance > c.Value
+ case AnnouncementOperatorGTE:
+ return balance >= c.Value
+ case AnnouncementOperatorLT:
+ return balance < c.Value
+ case AnnouncementOperatorLTE:
+ return balance <= c.Value
+ case AnnouncementOperatorEQ:
+ return balance == c.Value
+ default:
+ return false
+ }
+
+ default:
+ return false
+ }
+}
+
+func (t AnnouncementTargeting) NormalizeAndValidate() (AnnouncementTargeting, error) {
+ normalized := AnnouncementTargeting{AnyOf: make([]AnnouncementConditionGroup, 0, len(t.AnyOf))}
+
+ // 允许空 targeting(展示给所有用户)
+ if len(t.AnyOf) == 0 {
+ return normalized, nil
+ }
+
+ if len(t.AnyOf) > 50 {
+ return AnnouncementTargeting{}, ErrAnnouncementInvalidTarget
+ }
+
+ for _, g := range t.AnyOf {
+ if len(g.AllOf) == 0 {
+ return AnnouncementTargeting{}, ErrAnnouncementInvalidTarget
+ }
+ if len(g.AllOf) > 50 {
+ return AnnouncementTargeting{}, ErrAnnouncementInvalidTarget
+ }
+
+ group := AnnouncementConditionGroup{AllOf: make([]AnnouncementCondition, 0, len(g.AllOf))}
+ for _, c := range g.AllOf {
+ cond := AnnouncementCondition{
+ Type: strings.TrimSpace(c.Type),
+ Operator: strings.TrimSpace(c.Operator),
+ Value: c.Value,
+ }
+ for _, gid := range c.GroupIDs {
+ if gid <= 0 {
+ return AnnouncementTargeting{}, ErrAnnouncementInvalidTarget
+ }
+ cond.GroupIDs = append(cond.GroupIDs, gid)
+ }
+
+ if err := cond.validate(); err != nil {
+ return AnnouncementTargeting{}, err
+ }
+ group.AllOf = append(group.AllOf, cond)
+ }
+
+ normalized.AnyOf = append(normalized.AnyOf, group)
+ }
+
+ return normalized, nil
+}
+
+func (c AnnouncementCondition) validate() error {
+ switch c.Type {
+ case AnnouncementConditionTypeSubscription:
+ if c.Operator != AnnouncementOperatorIn {
+ return ErrAnnouncementInvalidTarget
+ }
+ if len(c.GroupIDs) == 0 {
+ return ErrAnnouncementInvalidTarget
+ }
+ return nil
+
+ case AnnouncementConditionTypeBalance:
+ switch c.Operator {
+ case AnnouncementOperatorGT, AnnouncementOperatorGTE, AnnouncementOperatorLT, AnnouncementOperatorLTE, AnnouncementOperatorEQ:
+ return nil
+ default:
+ return ErrAnnouncementInvalidTarget
+ }
+
+ default:
+ return ErrAnnouncementInvalidTarget
+ }
+}
+
+type Announcement struct {
+ ID int64
+ Title string
+ Content string
+ Status string
+ Targeting AnnouncementTargeting
+ StartsAt *time.Time
+ EndsAt *time.Time
+ CreatedBy *int64
+ UpdatedBy *int64
+ CreatedAt time.Time
+ UpdatedAt time.Time
+}
+
+func (a *Announcement) IsActiveAt(now time.Time) bool {
+ if a == nil {
+ return false
+ }
+ if a.Status != AnnouncementStatusActive {
+ return false
+ }
+ if a.StartsAt != nil && now.Before(*a.StartsAt) {
+ return false
+ }
+ if a.EndsAt != nil && !now.Before(*a.EndsAt) {
+ // ends_at 语义:到点即下线
+ return false
+ }
+ return true
+}
diff --git a/backend/internal/domain/constants.go b/backend/internal/domain/constants.go
new file mode 100644
index 00000000..4ecea9d8
--- /dev/null
+++ b/backend/internal/domain/constants.go
@@ -0,0 +1,64 @@
+package domain
+
+// Status constants
+const (
+ StatusActive = "active"
+ StatusDisabled = "disabled"
+ StatusError = "error"
+ StatusUnused = "unused"
+ StatusUsed = "used"
+ StatusExpired = "expired"
+)
+
+// Role constants
+const (
+ RoleAdmin = "admin"
+ RoleUser = "user"
+)
+
+// Platform constants
+const (
+ PlatformAnthropic = "anthropic"
+ PlatformOpenAI = "openai"
+ PlatformGemini = "gemini"
+ PlatformAntigravity = "antigravity"
+)
+
+// Account type constants
+const (
+ AccountTypeOAuth = "oauth" // OAuth类型账号(full scope: profile + inference)
+ AccountTypeSetupToken = "setup-token" // Setup Token类型账号(inference only scope)
+ AccountTypeAPIKey = "apikey" // API Key类型账号
+)
+
+// Redeem type constants
+const (
+ RedeemTypeBalance = "balance"
+ RedeemTypeConcurrency = "concurrency"
+ RedeemTypeSubscription = "subscription"
+)
+
+// PromoCode status constants
+const (
+ PromoCodeStatusActive = "active"
+ PromoCodeStatusDisabled = "disabled"
+)
+
+// Admin adjustment type constants
+const (
+ AdjustmentTypeAdminBalance = "admin_balance" // 管理员调整余额
+ AdjustmentTypeAdminConcurrency = "admin_concurrency" // 管理员调整并发数
+)
+
+// Group subscription type constants
+const (
+ SubscriptionTypeStandard = "standard" // 标准计费模式(按余额扣费)
+ SubscriptionTypeSubscription = "subscription" // 订阅模式(按限额控制)
+)
+
+// Subscription status constants
+const (
+ SubscriptionStatusActive = "active"
+ SubscriptionStatusExpired = "expired"
+ SubscriptionStatusSuspended = "suspended"
+)
diff --git a/backend/internal/handler/admin/announcement_handler.go b/backend/internal/handler/admin/announcement_handler.go
new file mode 100644
index 00000000..a4e9f2f0
--- /dev/null
+++ b/backend/internal/handler/admin/announcement_handler.go
@@ -0,0 +1,247 @@
+package admin
+
+import (
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/Wei-Shaw/sub2api/internal/handler/dto"
+ "github.com/Wei-Shaw/sub2api/internal/pkg/pagination"
+ "github.com/Wei-Shaw/sub2api/internal/pkg/response"
+ middleware2 "github.com/Wei-Shaw/sub2api/internal/server/middleware"
+ "github.com/Wei-Shaw/sub2api/internal/service"
+
+ "github.com/gin-gonic/gin"
+)
+
+// AnnouncementHandler handles admin announcement management
+type AnnouncementHandler struct {
+ announcementService *service.AnnouncementService
+}
+
+// NewAnnouncementHandler creates a new admin announcement handler
+func NewAnnouncementHandler(announcementService *service.AnnouncementService) *AnnouncementHandler {
+ return &AnnouncementHandler{
+ announcementService: announcementService,
+ }
+}
+
+type CreateAnnouncementRequest struct {
+ Title string `json:"title" binding:"required"`
+ Content string `json:"content" binding:"required"`
+ Status string `json:"status" binding:"omitempty,oneof=draft active archived"`
+ Targeting service.AnnouncementTargeting `json:"targeting"`
+ StartsAt *int64 `json:"starts_at"` // Unix seconds, 0/empty = immediate
+ EndsAt *int64 `json:"ends_at"` // Unix seconds, 0/empty = never
+}
+
+type UpdateAnnouncementRequest struct {
+ Title *string `json:"title"`
+ Content *string `json:"content"`
+ Status *string `json:"status" binding:"omitempty,oneof=draft active archived"`
+ Targeting *service.AnnouncementTargeting `json:"targeting"`
+ StartsAt *int64 `json:"starts_at"` // Unix seconds, 0 = clear
+ EndsAt *int64 `json:"ends_at"` // Unix seconds, 0 = clear
+}
+
+// List handles listing announcements with filters
+// GET /api/v1/admin/announcements
+func (h *AnnouncementHandler) List(c *gin.Context) {
+ page, pageSize := response.ParsePagination(c)
+ status := strings.TrimSpace(c.Query("status"))
+ search := strings.TrimSpace(c.Query("search"))
+ if len(search) > 200 {
+ search = search[:200]
+ }
+
+ params := pagination.PaginationParams{
+ Page: page,
+ PageSize: pageSize,
+ }
+
+ items, paginationResult, err := h.announcementService.List(
+ c.Request.Context(),
+ params,
+ service.AnnouncementListFilters{Status: status, Search: search},
+ )
+ if err != nil {
+ response.ErrorFrom(c, err)
+ return
+ }
+
+ out := make([]dto.Announcement, 0, len(items))
+ for i := range items {
+ out = append(out, *dto.AnnouncementFromService(&items[i]))
+ }
+ response.Paginated(c, out, paginationResult.Total, page, pageSize)
+}
+
+// GetByID handles getting an announcement by ID
+// GET /api/v1/admin/announcements/:id
+func (h *AnnouncementHandler) GetByID(c *gin.Context) {
+ announcementID, err := strconv.ParseInt(c.Param("id"), 10, 64)
+ if err != nil || announcementID <= 0 {
+ response.BadRequest(c, "Invalid announcement ID")
+ return
+ }
+
+ item, err := h.announcementService.GetByID(c.Request.Context(), announcementID)
+ if err != nil {
+ response.ErrorFrom(c, err)
+ return
+ }
+
+ response.Success(c, dto.AnnouncementFromService(item))
+}
+
+// Create handles creating a new announcement
+// POST /api/v1/admin/announcements
+func (h *AnnouncementHandler) Create(c *gin.Context) {
+ var req CreateAnnouncementRequest
+ if err := c.ShouldBindJSON(&req); err != nil {
+ response.BadRequest(c, "Invalid request: "+err.Error())
+ return
+ }
+
+ subject, ok := middleware2.GetAuthSubjectFromContext(c)
+ if !ok {
+ response.Unauthorized(c, "User not found in context")
+ return
+ }
+
+ input := &service.CreateAnnouncementInput{
+ Title: req.Title,
+ Content: req.Content,
+ Status: req.Status,
+ Targeting: req.Targeting,
+ ActorID: &subject.UserID,
+ }
+
+ if req.StartsAt != nil && *req.StartsAt > 0 {
+ t := time.Unix(*req.StartsAt, 0)
+ input.StartsAt = &t
+ }
+ if req.EndsAt != nil && *req.EndsAt > 0 {
+ t := time.Unix(*req.EndsAt, 0)
+ input.EndsAt = &t
+ }
+
+ created, err := h.announcementService.Create(c.Request.Context(), input)
+ if err != nil {
+ response.ErrorFrom(c, err)
+ return
+ }
+
+ response.Success(c, dto.AnnouncementFromService(created))
+}
+
+// Update handles updating an announcement
+// PUT /api/v1/admin/announcements/:id
+func (h *AnnouncementHandler) Update(c *gin.Context) {
+ announcementID, err := strconv.ParseInt(c.Param("id"), 10, 64)
+ if err != nil || announcementID <= 0 {
+ response.BadRequest(c, "Invalid announcement ID")
+ return
+ }
+
+ var req UpdateAnnouncementRequest
+ if err := c.ShouldBindJSON(&req); err != nil {
+ response.BadRequest(c, "Invalid request: "+err.Error())
+ return
+ }
+
+ subject, ok := middleware2.GetAuthSubjectFromContext(c)
+ if !ok {
+ response.Unauthorized(c, "User not found in context")
+ return
+ }
+
+ input := &service.UpdateAnnouncementInput{
+ Title: req.Title,
+ Content: req.Content,
+ Status: req.Status,
+ Targeting: req.Targeting,
+ ActorID: &subject.UserID,
+ }
+
+ if req.StartsAt != nil {
+ if *req.StartsAt == 0 {
+ var cleared *time.Time = nil
+ input.StartsAt = &cleared
+ } else {
+ t := time.Unix(*req.StartsAt, 0)
+ ptr := &t
+ input.StartsAt = &ptr
+ }
+ }
+
+ if req.EndsAt != nil {
+ if *req.EndsAt == 0 {
+ var cleared *time.Time = nil
+ input.EndsAt = &cleared
+ } else {
+ t := time.Unix(*req.EndsAt, 0)
+ ptr := &t
+ input.EndsAt = &ptr
+ }
+ }
+
+ updated, err := h.announcementService.Update(c.Request.Context(), announcementID, input)
+ if err != nil {
+ response.ErrorFrom(c, err)
+ return
+ }
+
+ response.Success(c, dto.AnnouncementFromService(updated))
+}
+
+// Delete handles deleting an announcement
+// DELETE /api/v1/admin/announcements/:id
+func (h *AnnouncementHandler) Delete(c *gin.Context) {
+ announcementID, err := strconv.ParseInt(c.Param("id"), 10, 64)
+ if err != nil || announcementID <= 0 {
+ response.BadRequest(c, "Invalid announcement ID")
+ return
+ }
+
+ if err := h.announcementService.Delete(c.Request.Context(), announcementID); err != nil {
+ response.ErrorFrom(c, err)
+ return
+ }
+
+ response.Success(c, gin.H{"message": "Announcement deleted successfully"})
+}
+
+// ListReadStatus handles listing users read status for an announcement
+// GET /api/v1/admin/announcements/:id/read-status
+func (h *AnnouncementHandler) ListReadStatus(c *gin.Context) {
+ announcementID, err := strconv.ParseInt(c.Param("id"), 10, 64)
+ if err != nil || announcementID <= 0 {
+ response.BadRequest(c, "Invalid announcement ID")
+ return
+ }
+
+ page, pageSize := response.ParsePagination(c)
+ params := pagination.PaginationParams{
+ Page: page,
+ PageSize: pageSize,
+ }
+ search := strings.TrimSpace(c.Query("search"))
+ if len(search) > 200 {
+ search = search[:200]
+ }
+
+ items, paginationResult, err := h.announcementService.ListUserReadStatus(
+ c.Request.Context(),
+ announcementID,
+ params,
+ search,
+ )
+ if err != nil {
+ response.ErrorFrom(c, err)
+ return
+ }
+
+ response.Paginated(c, items, paginationResult.Total, page, pageSize)
+}
+
diff --git a/backend/internal/handler/announcement_handler.go b/backend/internal/handler/announcement_handler.go
new file mode 100644
index 00000000..1e1424eb
--- /dev/null
+++ b/backend/internal/handler/announcement_handler.go
@@ -0,0 +1,82 @@
+package handler
+
+import (
+ "strconv"
+ "strings"
+
+ "github.com/Wei-Shaw/sub2api/internal/handler/dto"
+ "github.com/Wei-Shaw/sub2api/internal/pkg/response"
+ middleware2 "github.com/Wei-Shaw/sub2api/internal/server/middleware"
+ "github.com/Wei-Shaw/sub2api/internal/service"
+
+ "github.com/gin-gonic/gin"
+)
+
+// AnnouncementHandler handles user announcement operations
+type AnnouncementHandler struct {
+ announcementService *service.AnnouncementService
+}
+
+// NewAnnouncementHandler creates a new user announcement handler
+func NewAnnouncementHandler(announcementService *service.AnnouncementService) *AnnouncementHandler {
+ return &AnnouncementHandler{
+ announcementService: announcementService,
+ }
+}
+
+// List handles listing announcements visible to current user
+// GET /api/v1/announcements
+func (h *AnnouncementHandler) List(c *gin.Context) {
+ subject, ok := middleware2.GetAuthSubjectFromContext(c)
+ if !ok {
+ response.Unauthorized(c, "User not found in context")
+ return
+ }
+
+ unreadOnly := parseBoolQuery(c.Query("unread_only"))
+
+ items, err := h.announcementService.ListForUser(c.Request.Context(), subject.UserID, unreadOnly)
+ if err != nil {
+ response.ErrorFrom(c, err)
+ return
+ }
+
+ out := make([]dto.UserAnnouncement, 0, len(items))
+ for i := range items {
+ out = append(out, *dto.UserAnnouncementFromService(&items[i]))
+ }
+ response.Success(c, out)
+}
+
+// MarkRead marks an announcement as read for current user
+// POST /api/v1/announcements/:id/read
+func (h *AnnouncementHandler) MarkRead(c *gin.Context) {
+ subject, ok := middleware2.GetAuthSubjectFromContext(c)
+ if !ok {
+ response.Unauthorized(c, "User not found in context")
+ return
+ }
+
+ announcementID, err := strconv.ParseInt(c.Param("id"), 10, 64)
+ if err != nil || announcementID <= 0 {
+ response.BadRequest(c, "Invalid announcement ID")
+ return
+ }
+
+ if err := h.announcementService.MarkRead(c.Request.Context(), subject.UserID, announcementID); err != nil {
+ response.ErrorFrom(c, err)
+ return
+ }
+
+ response.Success(c, gin.H{"message": "ok"})
+}
+
+func parseBoolQuery(v string) bool {
+ switch strings.TrimSpace(strings.ToLower(v)) {
+ case "1", "true", "yes", "y", "on":
+ return true
+ default:
+ return false
+ }
+}
+
diff --git a/backend/internal/handler/dto/announcement.go b/backend/internal/handler/dto/announcement.go
new file mode 100644
index 00000000..ec2a8ca7
--- /dev/null
+++ b/backend/internal/handler/dto/announcement.go
@@ -0,0 +1,75 @@
+package dto
+
+import (
+ "time"
+
+ "github.com/Wei-Shaw/sub2api/internal/service"
+)
+
+type Announcement struct {
+ ID int64 `json:"id"`
+ Title string `json:"title"`
+ Content string `json:"content"`
+ Status string `json:"status"`
+
+ Targeting service.AnnouncementTargeting `json:"targeting"`
+
+ StartsAt *time.Time `json:"starts_at,omitempty"`
+ EndsAt *time.Time `json:"ends_at,omitempty"`
+
+ CreatedBy *int64 `json:"created_by,omitempty"`
+ UpdatedBy *int64 `json:"updated_by,omitempty"`
+
+ CreatedAt time.Time `json:"created_at"`
+ UpdatedAt time.Time `json:"updated_at"`
+}
+
+type UserAnnouncement struct {
+ ID int64 `json:"id"`
+ Title string `json:"title"`
+ Content string `json:"content"`
+
+ StartsAt *time.Time `json:"starts_at,omitempty"`
+ EndsAt *time.Time `json:"ends_at,omitempty"`
+
+ ReadAt *time.Time `json:"read_at,omitempty"`
+
+ CreatedAt time.Time `json:"created_at"`
+ UpdatedAt time.Time `json:"updated_at"`
+}
+
+func AnnouncementFromService(a *service.Announcement) *Announcement {
+ if a == nil {
+ return nil
+ }
+ return &Announcement{
+ ID: a.ID,
+ Title: a.Title,
+ Content: a.Content,
+ Status: a.Status,
+ Targeting: a.Targeting,
+ StartsAt: a.StartsAt,
+ EndsAt: a.EndsAt,
+ CreatedBy: a.CreatedBy,
+ UpdatedBy: a.UpdatedBy,
+ CreatedAt: a.CreatedAt,
+ UpdatedAt: a.UpdatedAt,
+ }
+}
+
+func UserAnnouncementFromService(a *service.UserAnnouncement) *UserAnnouncement {
+ if a == nil {
+ return nil
+ }
+ return &UserAnnouncement{
+ ID: a.Announcement.ID,
+ Title: a.Announcement.Title,
+ Content: a.Announcement.Content,
+ StartsAt: a.Announcement.StartsAt,
+ EndsAt: a.Announcement.EndsAt,
+ ReadAt: a.ReadAt,
+ CreatedAt: a.Announcement.CreatedAt,
+ UpdatedAt: a.Announcement.UpdatedAt,
+ }
+}
+
diff --git a/backend/internal/handler/handler.go b/backend/internal/handler/handler.go
index 907c314d..b8f7d417 100644
--- a/backend/internal/handler/handler.go
+++ b/backend/internal/handler/handler.go
@@ -10,6 +10,7 @@ type AdminHandlers struct {
User *admin.UserHandler
Group *admin.GroupHandler
Account *admin.AccountHandler
+ Announcement *admin.AnnouncementHandler
OAuth *admin.OAuthHandler
OpenAIOAuth *admin.OpenAIOAuthHandler
GeminiOAuth *admin.GeminiOAuthHandler
@@ -33,6 +34,7 @@ type Handlers struct {
Usage *UsageHandler
Redeem *RedeemHandler
Subscription *SubscriptionHandler
+ Announcement *AnnouncementHandler
Admin *AdminHandlers
Gateway *GatewayHandler
OpenAIGateway *OpenAIGatewayHandler
diff --git a/backend/internal/handler/wire.go b/backend/internal/handler/wire.go
index 92e8edeb..48a3794b 100644
--- a/backend/internal/handler/wire.go
+++ b/backend/internal/handler/wire.go
@@ -13,6 +13,7 @@ func ProvideAdminHandlers(
userHandler *admin.UserHandler,
groupHandler *admin.GroupHandler,
accountHandler *admin.AccountHandler,
+ announcementHandler *admin.AnnouncementHandler,
oauthHandler *admin.OAuthHandler,
openaiOAuthHandler *admin.OpenAIOAuthHandler,
geminiOAuthHandler *admin.GeminiOAuthHandler,
@@ -32,6 +33,7 @@ func ProvideAdminHandlers(
User: userHandler,
Group: groupHandler,
Account: accountHandler,
+ Announcement: announcementHandler,
OAuth: oauthHandler,
OpenAIOAuth: openaiOAuthHandler,
GeminiOAuth: geminiOAuthHandler,
@@ -66,6 +68,7 @@ func ProvideHandlers(
usageHandler *UsageHandler,
redeemHandler *RedeemHandler,
subscriptionHandler *SubscriptionHandler,
+ announcementHandler *AnnouncementHandler,
adminHandlers *AdminHandlers,
gatewayHandler *GatewayHandler,
openaiGatewayHandler *OpenAIGatewayHandler,
@@ -79,6 +82,7 @@ func ProvideHandlers(
Usage: usageHandler,
Redeem: redeemHandler,
Subscription: subscriptionHandler,
+ Announcement: announcementHandler,
Admin: adminHandlers,
Gateway: gatewayHandler,
OpenAIGateway: openaiGatewayHandler,
@@ -96,6 +100,7 @@ var ProviderSet = wire.NewSet(
NewUsageHandler,
NewRedeemHandler,
NewSubscriptionHandler,
+ NewAnnouncementHandler,
NewGatewayHandler,
NewOpenAIGatewayHandler,
NewTotpHandler,
@@ -106,6 +111,7 @@ var ProviderSet = wire.NewSet(
admin.NewUserHandler,
admin.NewGroupHandler,
admin.NewAccountHandler,
+ admin.NewAnnouncementHandler,
admin.NewOAuthHandler,
admin.NewOpenAIOAuthHandler,
admin.NewGeminiOAuthHandler,
diff --git a/backend/internal/repository/announcement_read_repo.go b/backend/internal/repository/announcement_read_repo.go
new file mode 100644
index 00000000..1c6b480a
--- /dev/null
+++ b/backend/internal/repository/announcement_read_repo.go
@@ -0,0 +1,84 @@
+package repository
+
+import (
+ "context"
+ "time"
+
+ dbent "github.com/Wei-Shaw/sub2api/ent"
+ "github.com/Wei-Shaw/sub2api/ent/announcementread"
+ "github.com/Wei-Shaw/sub2api/internal/service"
+)
+
+type announcementReadRepository struct {
+ client *dbent.Client
+}
+
+func NewAnnouncementReadRepository(client *dbent.Client) service.AnnouncementReadRepository {
+ return &announcementReadRepository{client: client}
+}
+
+func (r *announcementReadRepository) MarkRead(ctx context.Context, announcementID, userID int64, readAt time.Time) error {
+ client := clientFromContext(ctx, r.client)
+ return client.AnnouncementRead.Create().
+ SetAnnouncementID(announcementID).
+ SetUserID(userID).
+ SetReadAt(readAt).
+ OnConflictColumns(announcementread.FieldAnnouncementID, announcementread.FieldUserID).
+ DoNothing().
+ Exec(ctx)
+}
+
+func (r *announcementReadRepository) GetReadMapByUser(ctx context.Context, userID int64, announcementIDs []int64) (map[int64]time.Time, error) {
+ if len(announcementIDs) == 0 {
+ return map[int64]time.Time{}, nil
+ }
+
+ rows, err := r.client.AnnouncementRead.Query().
+ Where(
+ announcementread.UserIDEQ(userID),
+ announcementread.AnnouncementIDIn(announcementIDs...),
+ ).
+ All(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ out := make(map[int64]time.Time, len(rows))
+ for i := range rows {
+ out[rows[i].AnnouncementID] = rows[i].ReadAt
+ }
+ return out, nil
+}
+
+func (r *announcementReadRepository) GetReadMapByUsers(ctx context.Context, announcementID int64, userIDs []int64) (map[int64]time.Time, error) {
+ if len(userIDs) == 0 {
+ return map[int64]time.Time{}, nil
+ }
+
+ rows, err := r.client.AnnouncementRead.Query().
+ Where(
+ announcementread.AnnouncementIDEQ(announcementID),
+ announcementread.UserIDIn(userIDs...),
+ ).
+ All(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ out := make(map[int64]time.Time, len(rows))
+ for i := range rows {
+ out[rows[i].UserID] = rows[i].ReadAt
+ }
+ return out, nil
+}
+
+func (r *announcementReadRepository) CountByAnnouncementID(ctx context.Context, announcementID int64) (int64, error) {
+ count, err := r.client.AnnouncementRead.Query().
+ Where(announcementread.AnnouncementIDEQ(announcementID)).
+ Count(ctx)
+ if err != nil {
+ return 0, err
+ }
+ return int64(count), nil
+}
+
diff --git a/backend/internal/repository/announcement_repo.go b/backend/internal/repository/announcement_repo.go
new file mode 100644
index 00000000..edeb82e6
--- /dev/null
+++ b/backend/internal/repository/announcement_repo.go
@@ -0,0 +1,195 @@
+package repository
+
+import (
+ "context"
+ "time"
+
+ dbent "github.com/Wei-Shaw/sub2api/ent"
+ "github.com/Wei-Shaw/sub2api/ent/announcement"
+ "github.com/Wei-Shaw/sub2api/internal/pkg/pagination"
+ "github.com/Wei-Shaw/sub2api/internal/service"
+)
+
+type announcementRepository struct {
+ client *dbent.Client
+}
+
+func NewAnnouncementRepository(client *dbent.Client) service.AnnouncementRepository {
+ return &announcementRepository{client: client}
+}
+
+func (r *announcementRepository) Create(ctx context.Context, a *service.Announcement) error {
+ client := clientFromContext(ctx, r.client)
+ builder := client.Announcement.Create().
+ SetTitle(a.Title).
+ SetContent(a.Content).
+ SetStatus(a.Status).
+ SetTargeting(a.Targeting)
+
+ if a.StartsAt != nil {
+ builder.SetStartsAt(*a.StartsAt)
+ }
+ if a.EndsAt != nil {
+ builder.SetEndsAt(*a.EndsAt)
+ }
+ if a.CreatedBy != nil {
+ builder.SetCreatedBy(*a.CreatedBy)
+ }
+ if a.UpdatedBy != nil {
+ builder.SetUpdatedBy(*a.UpdatedBy)
+ }
+
+ created, err := builder.Save(ctx)
+ if err != nil {
+ return err
+ }
+
+ applyAnnouncementEntityToService(a, created)
+ return nil
+}
+
+func (r *announcementRepository) GetByID(ctx context.Context, id int64) (*service.Announcement, error) {
+ m, err := r.client.Announcement.Query().
+ Where(announcement.IDEQ(id)).
+ Only(ctx)
+ if err != nil {
+ return nil, translatePersistenceError(err, service.ErrAnnouncementNotFound, nil)
+ }
+ return announcementEntityToService(m), nil
+}
+
+func (r *announcementRepository) Update(ctx context.Context, a *service.Announcement) error {
+ client := clientFromContext(ctx, r.client)
+ builder := client.Announcement.UpdateOneID(a.ID).
+ SetTitle(a.Title).
+ SetContent(a.Content).
+ SetStatus(a.Status).
+ SetTargeting(a.Targeting)
+
+ if a.StartsAt != nil {
+ builder.SetStartsAt(*a.StartsAt)
+ } else {
+ builder.ClearStartsAt()
+ }
+ if a.EndsAt != nil {
+ builder.SetEndsAt(*a.EndsAt)
+ } else {
+ builder.ClearEndsAt()
+ }
+ if a.CreatedBy != nil {
+ builder.SetCreatedBy(*a.CreatedBy)
+ } else {
+ builder.ClearCreatedBy()
+ }
+ if a.UpdatedBy != nil {
+ builder.SetUpdatedBy(*a.UpdatedBy)
+ } else {
+ builder.ClearUpdatedBy()
+ }
+
+ updated, err := builder.Save(ctx)
+ if err != nil {
+ return translatePersistenceError(err, service.ErrAnnouncementNotFound, nil)
+ }
+
+ a.UpdatedAt = updated.UpdatedAt
+ return nil
+}
+
+func (r *announcementRepository) Delete(ctx context.Context, id int64) error {
+ client := clientFromContext(ctx, r.client)
+ _, err := client.Announcement.Delete().Where(announcement.IDEQ(id)).Exec(ctx)
+ return err
+}
+
+func (r *announcementRepository) List(
+ ctx context.Context,
+ params pagination.PaginationParams,
+ filters service.AnnouncementListFilters,
+) ([]service.Announcement, *pagination.PaginationResult, error) {
+ q := r.client.Announcement.Query()
+
+ if filters.Status != "" {
+ q = q.Where(announcement.StatusEQ(filters.Status))
+ }
+ if filters.Search != "" {
+ q = q.Where(
+ announcement.Or(
+ announcement.TitleContainsFold(filters.Search),
+ announcement.ContentContainsFold(filters.Search),
+ ),
+ )
+ }
+
+ total, err := q.Count(ctx)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ items, err := q.
+ Offset(params.Offset()).
+ Limit(params.Limit()).
+ Order(dbent.Desc(announcement.FieldID)).
+ All(ctx)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ out := announcementEntitiesToService(items)
+ return out, paginationResultFromTotal(int64(total), params), nil
+}
+
+func (r *announcementRepository) ListActive(ctx context.Context, now time.Time) ([]service.Announcement, error) {
+ q := r.client.Announcement.Query().
+ Where(
+ announcement.StatusEQ(service.AnnouncementStatusActive),
+ announcement.Or(announcement.StartsAtIsNil(), announcement.StartsAtLTE(now)),
+ announcement.Or(announcement.EndsAtIsNil(), announcement.EndsAtGT(now)),
+ ).
+ Order(dbent.Desc(announcement.FieldID))
+
+ items, err := q.All(ctx)
+ if err != nil {
+ return nil, err
+ }
+ return announcementEntitiesToService(items), nil
+}
+
+func applyAnnouncementEntityToService(dst *service.Announcement, src *dbent.Announcement) {
+ if dst == nil || src == nil {
+ return
+ }
+ dst.ID = src.ID
+ dst.CreatedAt = src.CreatedAt
+ dst.UpdatedAt = src.UpdatedAt
+}
+
+func announcementEntityToService(m *dbent.Announcement) *service.Announcement {
+ if m == nil {
+ return nil
+ }
+ return &service.Announcement{
+ ID: m.ID,
+ Title: m.Title,
+ Content: m.Content,
+ Status: m.Status,
+ Targeting: m.Targeting,
+ StartsAt: m.StartsAt,
+ EndsAt: m.EndsAt,
+ CreatedBy: m.CreatedBy,
+ UpdatedBy: m.UpdatedBy,
+ CreatedAt: m.CreatedAt,
+ UpdatedAt: m.UpdatedAt,
+ }
+}
+
+func announcementEntitiesToService(models []*dbent.Announcement) []service.Announcement {
+ out := make([]service.Announcement, 0, len(models))
+ for i := range models {
+ if s := announcementEntityToService(models[i]); s != nil {
+ out = append(out, *s)
+ }
+ }
+ return out
+}
+
diff --git a/backend/internal/repository/wire.go b/backend/internal/repository/wire.go
index 3e1c05fc..e3394361 100644
--- a/backend/internal/repository/wire.go
+++ b/backend/internal/repository/wire.go
@@ -56,6 +56,8 @@ var ProviderSet = wire.NewSet(
NewProxyRepository,
NewRedeemCodeRepository,
NewPromoCodeRepository,
+ NewAnnouncementRepository,
+ NewAnnouncementReadRepository,
NewUsageLogRepository,
NewUsageCleanupRepository,
NewDashboardAggregationRepository,
diff --git a/backend/internal/server/routes/admin.go b/backend/internal/server/routes/admin.go
index 050e724d..3e0033e7 100644
--- a/backend/internal/server/routes/admin.go
+++ b/backend/internal/server/routes/admin.go
@@ -29,6 +29,9 @@ func RegisterAdminRoutes(
// 账号管理
registerAccountRoutes(admin, h)
+ // 公告管理
+ registerAnnouncementRoutes(admin, h)
+
// OpenAI OAuth
registerOpenAIOAuthRoutes(admin, h)
@@ -229,6 +232,18 @@ func registerAccountRoutes(admin *gin.RouterGroup, h *handler.Handlers) {
}
}
+func registerAnnouncementRoutes(admin *gin.RouterGroup, h *handler.Handlers) {
+ announcements := admin.Group("/announcements")
+ {
+ announcements.GET("", h.Admin.Announcement.List)
+ announcements.POST("", h.Admin.Announcement.Create)
+ announcements.GET("/:id", h.Admin.Announcement.GetByID)
+ announcements.PUT("/:id", h.Admin.Announcement.Update)
+ announcements.DELETE("/:id", h.Admin.Announcement.Delete)
+ announcements.GET("/:id/read-status", h.Admin.Announcement.ListReadStatus)
+ }
+}
+
func registerOpenAIOAuthRoutes(admin *gin.RouterGroup, h *handler.Handlers) {
openai := admin.Group("/openai")
{
diff --git a/backend/internal/server/routes/user.go b/backend/internal/server/routes/user.go
index 83cf31c4..5581e1e1 100644
--- a/backend/internal/server/routes/user.go
+++ b/backend/internal/server/routes/user.go
@@ -64,6 +64,13 @@ func RegisterUserRoutes(
usage.POST("/dashboard/api-keys-usage", h.Usage.DashboardAPIKeysUsage)
}
+ // 公告(用户可见)
+ announcements := authenticated.Group("/announcements")
+ {
+ announcements.GET("", h.Announcement.List)
+ announcements.POST("/:id/read", h.Announcement.MarkRead)
+ }
+
// 卡密兑换
redeem := authenticated.Group("/redeem")
{
diff --git a/backend/internal/service/announcement.go b/backend/internal/service/announcement.go
new file mode 100644
index 00000000..2ba5af5d
--- /dev/null
+++ b/backend/internal/service/announcement.go
@@ -0,0 +1,64 @@
+package service
+
+import (
+ "context"
+ "time"
+
+ "github.com/Wei-Shaw/sub2api/internal/domain"
+ "github.com/Wei-Shaw/sub2api/internal/pkg/pagination"
+)
+
+const (
+ AnnouncementStatusDraft = domain.AnnouncementStatusDraft
+ AnnouncementStatusActive = domain.AnnouncementStatusActive
+ AnnouncementStatusArchived = domain.AnnouncementStatusArchived
+)
+
+const (
+ AnnouncementConditionTypeSubscription = domain.AnnouncementConditionTypeSubscription
+ AnnouncementConditionTypeBalance = domain.AnnouncementConditionTypeBalance
+)
+
+const (
+ AnnouncementOperatorIn = domain.AnnouncementOperatorIn
+ AnnouncementOperatorGT = domain.AnnouncementOperatorGT
+ AnnouncementOperatorGTE = domain.AnnouncementOperatorGTE
+ AnnouncementOperatorLT = domain.AnnouncementOperatorLT
+ AnnouncementOperatorLTE = domain.AnnouncementOperatorLTE
+ AnnouncementOperatorEQ = domain.AnnouncementOperatorEQ
+)
+
+var (
+ ErrAnnouncementNotFound = domain.ErrAnnouncementNotFound
+ ErrAnnouncementInvalidTarget = domain.ErrAnnouncementInvalidTarget
+)
+
+type AnnouncementTargeting = domain.AnnouncementTargeting
+
+type AnnouncementConditionGroup = domain.AnnouncementConditionGroup
+
+type AnnouncementCondition = domain.AnnouncementCondition
+
+type Announcement = domain.Announcement
+
+type AnnouncementListFilters struct {
+ Status string
+ Search string
+}
+
+type AnnouncementRepository interface {
+ Create(ctx context.Context, a *Announcement) error
+ GetByID(ctx context.Context, id int64) (*Announcement, error)
+ Update(ctx context.Context, a *Announcement) error
+ Delete(ctx context.Context, id int64) error
+
+ List(ctx context.Context, params pagination.PaginationParams, filters AnnouncementListFilters) ([]Announcement, *pagination.PaginationResult, error)
+ ListActive(ctx context.Context, now time.Time) ([]Announcement, error)
+}
+
+type AnnouncementReadRepository interface {
+ MarkRead(ctx context.Context, announcementID, userID int64, readAt time.Time) error
+ GetReadMapByUser(ctx context.Context, userID int64, announcementIDs []int64) (map[int64]time.Time, error)
+ GetReadMapByUsers(ctx context.Context, announcementID int64, userIDs []int64) (map[int64]time.Time, error)
+ CountByAnnouncementID(ctx context.Context, announcementID int64) (int64, error)
+}
diff --git a/backend/internal/service/announcement_service.go b/backend/internal/service/announcement_service.go
new file mode 100644
index 00000000..c2588e6c
--- /dev/null
+++ b/backend/internal/service/announcement_service.go
@@ -0,0 +1,378 @@
+package service
+
+import (
+ "context"
+ "fmt"
+ "sort"
+ "strings"
+ "time"
+
+ "github.com/Wei-Shaw/sub2api/internal/domain"
+ "github.com/Wei-Shaw/sub2api/internal/pkg/pagination"
+)
+
+type AnnouncementService struct {
+ announcementRepo AnnouncementRepository
+ readRepo AnnouncementReadRepository
+ userRepo UserRepository
+ userSubRepo UserSubscriptionRepository
+}
+
+func NewAnnouncementService(
+ announcementRepo AnnouncementRepository,
+ readRepo AnnouncementReadRepository,
+ userRepo UserRepository,
+ userSubRepo UserSubscriptionRepository,
+) *AnnouncementService {
+ return &AnnouncementService{
+ announcementRepo: announcementRepo,
+ readRepo: readRepo,
+ userRepo: userRepo,
+ userSubRepo: userSubRepo,
+ }
+}
+
+type CreateAnnouncementInput struct {
+ Title string
+ Content string
+ Status string
+ Targeting AnnouncementTargeting
+ StartsAt *time.Time
+ EndsAt *time.Time
+ ActorID *int64 // 管理员用户ID
+}
+
+type UpdateAnnouncementInput struct {
+ Title *string
+ Content *string
+ Status *string
+ Targeting *AnnouncementTargeting
+ StartsAt **time.Time
+ EndsAt **time.Time
+ ActorID *int64 // 管理员用户ID
+}
+
+type UserAnnouncement struct {
+ Announcement Announcement
+ ReadAt *time.Time
+}
+
+type AnnouncementUserReadStatus struct {
+ UserID int64 `json:"user_id"`
+ Email string `json:"email"`
+ Username string `json:"username"`
+ Balance float64 `json:"balance"`
+ Eligible bool `json:"eligible"`
+ ReadAt *time.Time `json:"read_at,omitempty"`
+}
+
+func (s *AnnouncementService) Create(ctx context.Context, input *CreateAnnouncementInput) (*Announcement, error) {
+ if input == nil {
+ return nil, fmt.Errorf("create announcement: nil input")
+ }
+
+ title := strings.TrimSpace(input.Title)
+ content := strings.TrimSpace(input.Content)
+ if title == "" || len(title) > 200 {
+ return nil, fmt.Errorf("create announcement: invalid title")
+ }
+ if content == "" {
+ return nil, fmt.Errorf("create announcement: content is required")
+ }
+
+ status := strings.TrimSpace(input.Status)
+ if status == "" {
+ status = AnnouncementStatusDraft
+ }
+ if !isValidAnnouncementStatus(status) {
+ return nil, fmt.Errorf("create announcement: invalid status")
+ }
+
+ targeting, err := domain.AnnouncementTargeting(input.Targeting).NormalizeAndValidate()
+ if err != nil {
+ return nil, err
+ }
+
+ if input.StartsAt != nil && input.EndsAt != nil {
+ if !input.StartsAt.Before(*input.EndsAt) {
+ return nil, fmt.Errorf("create announcement: starts_at must be before ends_at")
+ }
+ }
+
+ a := &Announcement{
+ Title: title,
+ Content: content,
+ Status: status,
+ Targeting: targeting,
+ StartsAt: input.StartsAt,
+ EndsAt: input.EndsAt,
+ }
+ if input.ActorID != nil && *input.ActorID > 0 {
+ a.CreatedBy = input.ActorID
+ a.UpdatedBy = input.ActorID
+ }
+
+ if err := s.announcementRepo.Create(ctx, a); err != nil {
+ return nil, fmt.Errorf("create announcement: %w", err)
+ }
+ return a, nil
+}
+
+func (s *AnnouncementService) Update(ctx context.Context, id int64, input *UpdateAnnouncementInput) (*Announcement, error) {
+ if input == nil {
+ return nil, fmt.Errorf("update announcement: nil input")
+ }
+
+ a, err := s.announcementRepo.GetByID(ctx, id)
+ if err != nil {
+ return nil, err
+ }
+
+ if input.Title != nil {
+ title := strings.TrimSpace(*input.Title)
+ if title == "" || len(title) > 200 {
+ return nil, fmt.Errorf("update announcement: invalid title")
+ }
+ a.Title = title
+ }
+ if input.Content != nil {
+ content := strings.TrimSpace(*input.Content)
+ if content == "" {
+ return nil, fmt.Errorf("update announcement: content is required")
+ }
+ a.Content = content
+ }
+ if input.Status != nil {
+ status := strings.TrimSpace(*input.Status)
+ if !isValidAnnouncementStatus(status) {
+ return nil, fmt.Errorf("update announcement: invalid status")
+ }
+ a.Status = status
+ }
+
+ if input.Targeting != nil {
+ targeting, err := domain.AnnouncementTargeting(*input.Targeting).NormalizeAndValidate()
+ if err != nil {
+ return nil, err
+ }
+ a.Targeting = targeting
+ }
+
+ if input.StartsAt != nil {
+ a.StartsAt = *input.StartsAt
+ }
+ if input.EndsAt != nil {
+ a.EndsAt = *input.EndsAt
+ }
+
+ if a.StartsAt != nil && a.EndsAt != nil {
+ if !a.StartsAt.Before(*a.EndsAt) {
+ return nil, fmt.Errorf("update announcement: starts_at must be before ends_at")
+ }
+ }
+
+ if input.ActorID != nil && *input.ActorID > 0 {
+ a.UpdatedBy = input.ActorID
+ }
+
+ if err := s.announcementRepo.Update(ctx, a); err != nil {
+ return nil, fmt.Errorf("update announcement: %w", err)
+ }
+ return a, nil
+}
+
+func (s *AnnouncementService) Delete(ctx context.Context, id int64) error {
+ if err := s.announcementRepo.Delete(ctx, id); err != nil {
+ return fmt.Errorf("delete announcement: %w", err)
+ }
+ return nil
+}
+
+func (s *AnnouncementService) GetByID(ctx context.Context, id int64) (*Announcement, error) {
+ return s.announcementRepo.GetByID(ctx, id)
+}
+
+func (s *AnnouncementService) List(ctx context.Context, params pagination.PaginationParams, filters AnnouncementListFilters) ([]Announcement, *pagination.PaginationResult, error) {
+ return s.announcementRepo.List(ctx, params, filters)
+}
+
+func (s *AnnouncementService) ListForUser(ctx context.Context, userID int64, unreadOnly bool) ([]UserAnnouncement, error) {
+ user, err := s.userRepo.GetByID(ctx, userID)
+ if err != nil {
+ return nil, fmt.Errorf("get user: %w", err)
+ }
+
+ activeSubs, err := s.userSubRepo.ListActiveByUserID(ctx, userID)
+ if err != nil {
+ return nil, fmt.Errorf("list active subscriptions: %w", err)
+ }
+ activeGroupIDs := make(map[int64]struct{}, len(activeSubs))
+ for i := range activeSubs {
+ activeGroupIDs[activeSubs[i].GroupID] = struct{}{}
+ }
+
+ now := time.Now()
+ anns, err := s.announcementRepo.ListActive(ctx, now)
+ if err != nil {
+ return nil, fmt.Errorf("list active announcements: %w", err)
+ }
+
+ visible := make([]Announcement, 0, len(anns))
+ ids := make([]int64, 0, len(anns))
+ for i := range anns {
+ a := anns[i]
+ if !a.IsActiveAt(now) {
+ continue
+ }
+ if !a.Targeting.Matches(user.Balance, activeGroupIDs) {
+ continue
+ }
+ visible = append(visible, a)
+ ids = append(ids, a.ID)
+ }
+
+ if len(visible) == 0 {
+ return []UserAnnouncement{}, nil
+ }
+
+ readMap, err := s.readRepo.GetReadMapByUser(ctx, userID, ids)
+ if err != nil {
+ return nil, fmt.Errorf("get read map: %w", err)
+ }
+
+ out := make([]UserAnnouncement, 0, len(visible))
+ for i := range visible {
+ a := visible[i]
+ readAt, ok := readMap[a.ID]
+ if unreadOnly && ok {
+ continue
+ }
+ var ptr *time.Time
+ if ok {
+ t := readAt
+ ptr = &t
+ }
+ out = append(out, UserAnnouncement{
+ Announcement: a,
+ ReadAt: ptr,
+ })
+ }
+
+ // 未读优先、同状态按创建时间倒序
+ sort.Slice(out, func(i, j int) bool {
+ ai, aj := out[i], out[j]
+ if (ai.ReadAt == nil) != (aj.ReadAt == nil) {
+ return ai.ReadAt == nil
+ }
+ return ai.Announcement.ID > aj.Announcement.ID
+ })
+
+ return out, nil
+}
+
+func (s *AnnouncementService) MarkRead(ctx context.Context, userID, announcementID int64) error {
+ // 安全:仅允许标记当前用户“可见”的公告
+ user, err := s.userRepo.GetByID(ctx, userID)
+ if err != nil {
+ return fmt.Errorf("get user: %w", err)
+ }
+
+ a, err := s.announcementRepo.GetByID(ctx, announcementID)
+ if err != nil {
+ return err
+ }
+
+ now := time.Now()
+ if !a.IsActiveAt(now) {
+ return ErrAnnouncementNotFound
+ }
+
+ activeSubs, err := s.userSubRepo.ListActiveByUserID(ctx, userID)
+ if err != nil {
+ return fmt.Errorf("list active subscriptions: %w", err)
+ }
+ activeGroupIDs := make(map[int64]struct{}, len(activeSubs))
+ for i := range activeSubs {
+ activeGroupIDs[activeSubs[i].GroupID] = struct{}{}
+ }
+
+ if !a.Targeting.Matches(user.Balance, activeGroupIDs) {
+ return ErrAnnouncementNotFound
+ }
+
+ if err := s.readRepo.MarkRead(ctx, announcementID, userID, now); err != nil {
+ return fmt.Errorf("mark read: %w", err)
+ }
+ return nil
+}
+
+func (s *AnnouncementService) ListUserReadStatus(
+ ctx context.Context,
+ announcementID int64,
+ params pagination.PaginationParams,
+ search string,
+) ([]AnnouncementUserReadStatus, *pagination.PaginationResult, error) {
+ ann, err := s.announcementRepo.GetByID(ctx, announcementID)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ filters := UserListFilters{
+ Search: strings.TrimSpace(search),
+ }
+
+ users, page, err := s.userRepo.ListWithFilters(ctx, params, filters)
+ if err != nil {
+ return nil, nil, fmt.Errorf("list users: %w", err)
+ }
+
+ userIDs := make([]int64, 0, len(users))
+ for i := range users {
+ userIDs = append(userIDs, users[i].ID)
+ }
+
+ readMap, err := s.readRepo.GetReadMapByUsers(ctx, announcementID, userIDs)
+ if err != nil {
+ return nil, nil, fmt.Errorf("get read map: %w", err)
+ }
+
+ out := make([]AnnouncementUserReadStatus, 0, len(users))
+ for i := range users {
+ u := users[i]
+ subs, err := s.userSubRepo.ListActiveByUserID(ctx, u.ID)
+ if err != nil {
+ return nil, nil, fmt.Errorf("list active subscriptions: %w", err)
+ }
+ activeGroupIDs := make(map[int64]struct{}, len(subs))
+ for j := range subs {
+ activeGroupIDs[subs[j].GroupID] = struct{}{}
+ }
+
+ readAt, ok := readMap[u.ID]
+ var ptr *time.Time
+ if ok {
+ t := readAt
+ ptr = &t
+ }
+
+ out = append(out, AnnouncementUserReadStatus{
+ UserID: u.ID,
+ Email: u.Email,
+ Username: u.Username,
+ Balance: u.Balance,
+ Eligible: domain.AnnouncementTargeting(ann.Targeting).Matches(u.Balance, activeGroupIDs),
+ ReadAt: ptr,
+ })
+ }
+
+ return out, page, nil
+}
+
+func isValidAnnouncementStatus(status string) bool {
+ switch status {
+ case AnnouncementStatusDraft, AnnouncementStatusActive, AnnouncementStatusArchived:
+ return true
+ default:
+ return false
+ }
+}
diff --git a/backend/internal/service/announcement_targeting_test.go b/backend/internal/service/announcement_targeting_test.go
new file mode 100644
index 00000000..fffea26b
--- /dev/null
+++ b/backend/internal/service/announcement_targeting_test.go
@@ -0,0 +1,67 @@
+package service
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestAnnouncementTargeting_Matches_EmptyMatchesAll(t *testing.T) {
+ var targeting AnnouncementTargeting
+ require.True(t, targeting.Matches(0, nil))
+ require.True(t, targeting.Matches(123.45, map[int64]struct{}{1: {}}))
+}
+
+func TestAnnouncementTargeting_NormalizeAndValidate_RejectsEmptyGroup(t *testing.T) {
+ targeting := AnnouncementTargeting{
+ AnyOf: []AnnouncementConditionGroup{
+ {AllOf: nil},
+ },
+ }
+ _, err := targeting.NormalizeAndValidate()
+ require.Error(t, err)
+ require.ErrorIs(t, err, ErrAnnouncementInvalidTarget)
+}
+
+func TestAnnouncementTargeting_NormalizeAndValidate_RejectsInvalidCondition(t *testing.T) {
+ targeting := AnnouncementTargeting{
+ AnyOf: []AnnouncementConditionGroup{
+ {
+ AllOf: []AnnouncementCondition{
+ {Type: "balance", Operator: "between", Value: 10},
+ },
+ },
+ },
+ }
+ _, err := targeting.NormalizeAndValidate()
+ require.Error(t, err)
+ require.ErrorIs(t, err, ErrAnnouncementInvalidTarget)
+}
+
+func TestAnnouncementTargeting_Matches_AndOrSemantics(t *testing.T) {
+ targeting := AnnouncementTargeting{
+ AnyOf: []AnnouncementConditionGroup{
+ {
+ AllOf: []AnnouncementCondition{
+ {Type: AnnouncementConditionTypeBalance, Operator: AnnouncementOperatorGTE, Value: 100},
+ {Type: AnnouncementConditionTypeSubscription, Operator: AnnouncementOperatorIn, GroupIDs: []int64{10}},
+ },
+ },
+ {
+ AllOf: []AnnouncementCondition{
+ {Type: AnnouncementConditionTypeBalance, Operator: AnnouncementOperatorLT, Value: 5},
+ },
+ },
+ },
+ }
+
+ // 命中第 2 组(balance < 5)
+ require.True(t, targeting.Matches(4.99, nil))
+ require.False(t, targeting.Matches(5, nil))
+
+ // 命中第 1 组(balance >= 100 AND 订阅 in [10])
+ require.False(t, targeting.Matches(100, map[int64]struct{}{}))
+ require.False(t, targeting.Matches(99.9, map[int64]struct{}{10: {}}))
+ require.True(t, targeting.Matches(100, map[int64]struct{}{10: {}}))
+}
+
diff --git a/backend/internal/service/domain_constants.go b/backend/internal/service/domain_constants.go
index 44df9073..eee8bddd 100644
--- a/backend/internal/service/domain_constants.go
+++ b/backend/internal/service/domain_constants.go
@@ -1,66 +1,68 @@
package service
+import "github.com/Wei-Shaw/sub2api/internal/domain"
+
// Status constants
const (
- StatusActive = "active"
- StatusDisabled = "disabled"
- StatusError = "error"
- StatusUnused = "unused"
- StatusUsed = "used"
- StatusExpired = "expired"
+ StatusActive = domain.StatusActive
+ StatusDisabled = domain.StatusDisabled
+ StatusError = domain.StatusError
+ StatusUnused = domain.StatusUnused
+ StatusUsed = domain.StatusUsed
+ StatusExpired = domain.StatusExpired
)
// Role constants
const (
- RoleAdmin = "admin"
- RoleUser = "user"
+ RoleAdmin = domain.RoleAdmin
+ RoleUser = domain.RoleUser
)
// Platform constants
const (
- PlatformAnthropic = "anthropic"
- PlatformOpenAI = "openai"
- PlatformGemini = "gemini"
- PlatformAntigravity = "antigravity"
+ PlatformAnthropic = domain.PlatformAnthropic
+ PlatformOpenAI = domain.PlatformOpenAI
+ PlatformGemini = domain.PlatformGemini
+ PlatformAntigravity = domain.PlatformAntigravity
)
// Account type constants
const (
- AccountTypeOAuth = "oauth" // OAuth类型账号(full scope: profile + inference)
- AccountTypeSetupToken = "setup-token" // Setup Token类型账号(inference only scope)
- AccountTypeAPIKey = "apikey" // API Key类型账号
+ AccountTypeOAuth = domain.AccountTypeOAuth // OAuth类型账号(full scope: profile + inference)
+ AccountTypeSetupToken = domain.AccountTypeSetupToken // Setup Token类型账号(inference only scope)
+ AccountTypeAPIKey = domain.AccountTypeAPIKey // API Key类型账号
)
// Redeem type constants
const (
- RedeemTypeBalance = "balance"
- RedeemTypeConcurrency = "concurrency"
- RedeemTypeSubscription = "subscription"
+ RedeemTypeBalance = domain.RedeemTypeBalance
+ RedeemTypeConcurrency = domain.RedeemTypeConcurrency
+ RedeemTypeSubscription = domain.RedeemTypeSubscription
)
// PromoCode status constants
const (
- PromoCodeStatusActive = "active"
- PromoCodeStatusDisabled = "disabled"
+ PromoCodeStatusActive = domain.PromoCodeStatusActive
+ PromoCodeStatusDisabled = domain.PromoCodeStatusDisabled
)
// Admin adjustment type constants
const (
- AdjustmentTypeAdminBalance = "admin_balance" // 管理员调整余额
- AdjustmentTypeAdminConcurrency = "admin_concurrency" // 管理员调整并发数
+ AdjustmentTypeAdminBalance = domain.AdjustmentTypeAdminBalance // 管理员调整余额
+ AdjustmentTypeAdminConcurrency = domain.AdjustmentTypeAdminConcurrency // 管理员调整并发数
)
// Group subscription type constants
const (
- SubscriptionTypeStandard = "standard" // 标准计费模式(按余额扣费)
- SubscriptionTypeSubscription = "subscription" // 订阅模式(按限额控制)
+ SubscriptionTypeStandard = domain.SubscriptionTypeStandard // 标准计费模式(按余额扣费)
+ SubscriptionTypeSubscription = domain.SubscriptionTypeSubscription // 订阅模式(按限额控制)
)
// Subscription status constants
const (
- SubscriptionStatusActive = "active"
- SubscriptionStatusExpired = "expired"
- SubscriptionStatusSuspended = "suspended"
+ SubscriptionStatusActive = domain.SubscriptionStatusActive
+ SubscriptionStatusExpired = domain.SubscriptionStatusExpired
+ SubscriptionStatusSuspended = domain.SubscriptionStatusSuspended
)
// LinuxDoConnectSyntheticEmailDomain 是 LinuxDo Connect 用户的合成邮箱后缀(RFC 保留域名)。
diff --git a/backend/internal/service/wire.go b/backend/internal/service/wire.go
index df86b2e7..096e15a0 100644
--- a/backend/internal/service/wire.go
+++ b/backend/internal/service/wire.go
@@ -226,6 +226,7 @@ var ProviderSet = wire.NewSet(
ProvidePricingService,
NewBillingService,
NewBillingCacheService,
+ NewAnnouncementService,
NewAdminService,
NewGatewayService,
NewOpenAIGatewayService,
diff --git a/backend/migrations/045_add_announcements.sql b/backend/migrations/045_add_announcements.sql
new file mode 100644
index 00000000..cfb9b4b5
--- /dev/null
+++ b/backend/migrations/045_add_announcements.sql
@@ -0,0 +1,44 @@
+-- 创建公告表
+CREATE TABLE IF NOT EXISTS announcements (
+ id BIGSERIAL PRIMARY KEY,
+ title VARCHAR(200) NOT NULL,
+ content TEXT NOT NULL,
+ status VARCHAR(20) NOT NULL DEFAULT 'draft',
+ targeting JSONB NOT NULL DEFAULT '{}'::jsonb,
+ starts_at TIMESTAMPTZ DEFAULT NULL,
+ ends_at TIMESTAMPTZ DEFAULT NULL,
+ created_by BIGINT DEFAULT NULL REFERENCES users(id) ON DELETE SET NULL,
+ updated_by BIGINT DEFAULT NULL REFERENCES users(id) ON DELETE SET NULL,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
+);
+
+-- 公告已读表
+CREATE TABLE IF NOT EXISTS announcement_reads (
+ id BIGSERIAL PRIMARY KEY,
+ announcement_id BIGINT NOT NULL REFERENCES announcements(id) ON DELETE CASCADE,
+ user_id BIGINT NOT NULL REFERENCES users(id) ON DELETE CASCADE,
+ read_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ UNIQUE(announcement_id, user_id)
+);
+
+-- 索引
+CREATE INDEX IF NOT EXISTS idx_announcements_status ON announcements(status);
+CREATE INDEX IF NOT EXISTS idx_announcements_starts_at ON announcements(starts_at);
+CREATE INDEX IF NOT EXISTS idx_announcements_ends_at ON announcements(ends_at);
+CREATE INDEX IF NOT EXISTS idx_announcements_created_at ON announcements(created_at);
+
+CREATE INDEX IF NOT EXISTS idx_announcement_reads_announcement_id ON announcement_reads(announcement_id);
+CREATE INDEX IF NOT EXISTS idx_announcement_reads_user_id ON announcement_reads(user_id);
+CREATE INDEX IF NOT EXISTS idx_announcement_reads_read_at ON announcement_reads(read_at);
+
+COMMENT ON TABLE announcements IS '系统公告';
+COMMENT ON COLUMN announcements.status IS '状态: draft, active, archived';
+COMMENT ON COLUMN announcements.targeting IS '展示条件(JSON 规则)';
+COMMENT ON COLUMN announcements.starts_at IS '开始展示时间(为空表示立即生效)';
+COMMENT ON COLUMN announcements.ends_at IS '结束展示时间(为空表示永久生效)';
+
+COMMENT ON TABLE announcement_reads IS '公告已读记录';
+COMMENT ON COLUMN announcement_reads.read_at IS '用户首次已读时间';
+
diff --git a/frontend/src/api/admin/announcements.ts b/frontend/src/api/admin/announcements.ts
new file mode 100644
index 00000000..d02fdda7
--- /dev/null
+++ b/frontend/src/api/admin/announcements.ts
@@ -0,0 +1,71 @@
+/**
+ * Admin Announcements API endpoints
+ */
+
+import { apiClient } from '../client'
+import type {
+ Announcement,
+ AnnouncementUserReadStatus,
+ BasePaginationResponse,
+ CreateAnnouncementRequest,
+ UpdateAnnouncementRequest
+} from '@/types'
+
+export async function list(
+ page: number = 1,
+ pageSize: number = 20,
+ filters?: {
+ status?: string
+ search?: string
+ }
+): Promise<BasePaginationResponse<Announcement>> {
+ const { data } = await apiClient.get<BasePaginationResponse<Announcement>>('/admin/announcements', {
+ params: { page, page_size: pageSize, ...filters }
+ })
+ return data
+}
+
+export async function getById(id: number): Promise<Announcement> {
+ const { data } = await apiClient.get<Announcement>(`/admin/announcements/${id}`)
+ return data
+}
+
+export async function create(request: CreateAnnouncementRequest): Promise<Announcement> {
+ const { data } = await apiClient.post<Announcement>('/admin/announcements', request)
+ return data
+}
+
+export async function update(id: number, request: UpdateAnnouncementRequest): Promise<Announcement> {
+ const { data } = await apiClient.put<Announcement>(`/admin/announcements/${id}`, request)
+ return data
+}
+
+export async function deleteAnnouncement(id: number): Promise<{ message: string }> {
+ const { data } = await apiClient.delete<{ message: string }>(`/admin/announcements/${id}`)
+ return data
+}
+
+export async function getReadStatus(
+ id: number,
+ page: number = 1,
+ pageSize: number = 20,
+ search: string = ''
+): Promise<BasePaginationResponse<AnnouncementUserReadStatus>> {
+ const { data } = await apiClient.get<BasePaginationResponse<AnnouncementUserReadStatus>>(
+ `/admin/announcements/${id}/read-status`,
+ { params: { page, page_size: pageSize, search } }
+ )
+ return data
+}
+
+const announcementsAPI = {
+ list,
+ getById,
+ create,
+ update,
+ delete: deleteAnnouncement,
+ getReadStatus
+}
+
+export default announcementsAPI
+
diff --git a/frontend/src/api/admin/index.ts b/frontend/src/api/admin/index.ts
index e86f6348..a88b02c6 100644
--- a/frontend/src/api/admin/index.ts
+++ b/frontend/src/api/admin/index.ts
@@ -10,6 +10,7 @@ import accountsAPI from './accounts'
import proxiesAPI from './proxies'
import redeemAPI from './redeem'
import promoAPI from './promo'
+import announcementsAPI from './announcements'
import settingsAPI from './settings'
import systemAPI from './system'
import subscriptionsAPI from './subscriptions'
@@ -30,6 +31,7 @@ export const adminAPI = {
proxies: proxiesAPI,
redeem: redeemAPI,
promo: promoAPI,
+ announcements: announcementsAPI,
settings: settingsAPI,
system: systemAPI,
subscriptions: subscriptionsAPI,
@@ -48,6 +50,7 @@ export {
proxiesAPI,
redeemAPI,
promoAPI,
+ announcementsAPI,
settingsAPI,
systemAPI,
subscriptionsAPI,
diff --git a/frontend/src/api/announcements.ts b/frontend/src/api/announcements.ts
new file mode 100644
index 00000000..a9034e2a
--- /dev/null
+++ b/frontend/src/api/announcements.ts
@@ -0,0 +1,26 @@
+/**
+ * User Announcements API endpoints
+ */
+
+import { apiClient } from './client'
+import type { UserAnnouncement } from '@/types'
+
+export async function list(unreadOnly: boolean = false): Promise<UserAnnouncement[]> {
+ const { data } = await apiClient.get<UserAnnouncement[]>('/announcements', {
+ params: unreadOnly ? { unread_only: 1 } : {}
+ })
+ return data
+}
+
+export async function markRead(id: number): Promise<{ message: string }> {
+ const { data } = await apiClient.post<{ message: string }>(`/announcements/${id}/read`)
+ return data
+}
+
+const announcementsAPI = {
+ list,
+ markRead
+}
+
+export default announcementsAPI
+
diff --git a/frontend/src/api/index.ts b/frontend/src/api/index.ts
index 347d0b94..070ce648 100644
--- a/frontend/src/api/index.ts
+++ b/frontend/src/api/index.ts
@@ -16,6 +16,7 @@ export { userAPI } from './user'
export { redeemAPI, type RedeemHistoryItem } from './redeem'
export { userGroupsAPI } from './groups'
export { totpAPI } from './totp'
+export { default as announcementsAPI } from './announcements'
// Admin APIs
export { adminAPI } from './admin'
diff --git a/frontend/src/components/admin/announcements/AnnouncementReadStatusDialog.vue b/frontend/src/components/admin/announcements/AnnouncementReadStatusDialog.vue
new file mode 100644
index 00000000..e7d991a8
--- /dev/null
+++ b/frontend/src/components/admin/announcements/AnnouncementReadStatusDialog.vue
@@ -0,0 +1,186 @@
+
+
+
+
+
+
+
+ {{ value }}
+
+
+
+ ${{ Number(value ?? 0).toFixed(2) }}
+
+
+
+
+ {{ value ? t('admin.announcements.eligible') : t('common.no') }}
+
+
+
+
+
+ {{ value ? formatDateTime(value) : t('admin.announcements.unread') }}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/frontend/src/components/admin/announcements/AnnouncementTargetingEditor.vue b/frontend/src/components/admin/announcements/AnnouncementTargetingEditor.vue
new file mode 100644
index 00000000..bd90af42
--- /dev/null
+++ b/frontend/src/components/admin/announcements/AnnouncementTargetingEditor.vue
@@ -0,0 +1,388 @@
+
+
+
+
+
+ {{ t('admin.announcements.form.targetingMode') }}
+
+
+ {{ mode === 'all' ? t('admin.announcements.form.targetingAll') : t('admin.announcements.form.targetingCustom') }}
+
+
+
+
+
+
+
+
+
+
+
+
+ OR
+
+ ({{ anyOf.length }}/50)
+
+
+
+
+
+
+ {{ t('admin.announcements.form.targetingCustom') }}: {{ t('admin.announcements.form.addOrGroup') }}
+
+
+
+
+
+
+ {{ t('admin.announcements.form.targetingCustom') }} #{{ groupIndex + 1 }}
+ AND ({{ (group.all_of?.length || 0) }}/50)
+
+
+ {{ t('admin.announcements.form.addAndCondition') }}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ setBalanceValue(groupIndex, condIndex, (e.target as HTMLInputElement).value)"
+ />
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {{ validationError }}
+
+
+
+
+
+
diff --git a/frontend/src/components/layout/AppSidebar.vue b/frontend/src/components/layout/AppSidebar.vue
index 474e4390..c685a2fa 100644
--- a/frontend/src/components/layout/AppSidebar.vue
+++ b/frontend/src/components/layout/AppSidebar.vue
@@ -319,6 +319,21 @@ const ServerIcon = {
)
}
+const BellIcon = {
+ render: () =>
+ h(
+ 'svg',
+ { fill: 'none', viewBox: '0 0 24 24', stroke: 'currentColor', 'stroke-width': '1.5' },
+ [
+ h('path', {
+ 'stroke-linecap': 'round',
+ 'stroke-linejoin': 'round',
+ d: 'M14.857 17.082a23.848 23.848 0 005.454-1.31A8.967 8.967 0 0118 9.75V9a6 6 0 10-12 0v.75a8.967 8.967 0 01-2.312 6.022c1.733.64 3.56 1.085 5.455 1.31m5.714 0a24.255 24.255 0 01-5.714 0m5.714 0a3 3 0 11-5.714 0'
+ })
+ ]
+ )
+}
+
const TicketIcon = {
render: () =>
h(
@@ -418,6 +433,7 @@ const ChevronDoubleRightIcon = {
const userNavItems = computed(() => {
const items = [
{ path: '/dashboard', label: t('nav.dashboard'), icon: DashboardIcon },
+ { path: '/announcements', label: t('nav.announcements'), icon: BellIcon },
{ path: '/keys', label: t('nav.apiKeys'), icon: KeyIcon },
{ path: '/usage', label: t('nav.usage'), icon: ChartIcon, hideInSimpleMode: true },
{ path: '/subscriptions', label: t('nav.mySubscriptions'), icon: CreditCardIcon, hideInSimpleMode: true },
@@ -440,6 +456,7 @@ const userNavItems = computed(() => {
// Personal navigation items (for admin's "My Account" section, without Dashboard)
const personalNavItems = computed(() => {
const items = [
+ { path: '/announcements', label: t('nav.announcements'), icon: BellIcon },
{ path: '/keys', label: t('nav.apiKeys'), icon: KeyIcon },
{ path: '/usage', label: t('nav.usage'), icon: ChartIcon, hideInSimpleMode: true },
{ path: '/subscriptions', label: t('nav.mySubscriptions'), icon: CreditCardIcon, hideInSimpleMode: true },
@@ -470,6 +487,7 @@ const adminNavItems = computed(() => {
{ path: '/admin/groups', label: t('nav.groups'), icon: FolderIcon, hideInSimpleMode: true },
{ path: '/admin/subscriptions', label: t('nav.subscriptions'), icon: CreditCardIcon, hideInSimpleMode: true },
{ path: '/admin/accounts', label: t('nav.accounts'), icon: GlobeIcon },
+ { path: '/admin/announcements', label: t('nav.announcements'), icon: BellIcon },
{ path: '/admin/proxies', label: t('nav.proxies'), icon: ServerIcon },
{ path: '/admin/redeem', label: t('nav.redeemCodes'), icon: TicketIcon, hideInSimpleMode: true },
{ path: '/admin/promo-codes', label: t('nav.promoCodes'), icon: GiftIcon, hideInSimpleMode: true },
diff --git a/frontend/src/i18n/locales/en.ts b/frontend/src/i18n/locales/en.ts
index dc93d37c..3ceaa063 100644
--- a/frontend/src/i18n/locales/en.ts
+++ b/frontend/src/i18n/locales/en.ts
@@ -185,6 +185,7 @@ export default {
// Navigation
nav: {
dashboard: 'Dashboard',
+ announcements: 'Announcements',
apiKeys: 'API Keys',
usage: 'Usage',
redeem: 'Redeem',
@@ -1951,6 +1952,73 @@ export default {
}
},
+ // Announcements
+ announcements: {
+ title: 'Announcements',
+ description: 'Create announcements and target by conditions',
+ createAnnouncement: 'Create Announcement',
+ editAnnouncement: 'Edit Announcement',
+ deleteAnnouncement: 'Delete Announcement',
+ searchAnnouncements: 'Search announcements...',
+ status: 'Status',
+ allStatus: 'All Status',
+ columns: {
+ title: 'Title',
+ status: 'Status',
+ targeting: 'Targeting',
+ timeRange: 'Schedule',
+ createdAt: 'Created At',
+ actions: 'Actions'
+ },
+ statusLabels: {
+ draft: 'Draft',
+ active: 'Active',
+ archived: 'Archived'
+ },
+ form: {
+ title: 'Title',
+ content: 'Content (Markdown supported)',
+ status: 'Status',
+ startsAt: 'Starts At',
+ endsAt: 'Ends At',
+ startsAtHint: 'Leave empty to start immediately',
+ endsAtHint: 'Leave empty to never expire',
+ targetingMode: 'Targeting',
+ targetingAll: 'All users',
+ targetingCustom: 'Custom rules',
+ addOrGroup: 'Add OR group',
+ addAndCondition: 'Add AND condition',
+ conditionType: 'Condition type',
+ conditionSubscription: 'Subscription',
+ conditionBalance: 'Balance',
+ operator: 'Operator',
+ balanceValue: 'Balance threshold',
+ selectPackages: 'Select packages'
+ },
+ operators: {
+ gt: '>',
+ gte: '≥',
+ lt: '<',
+ lte: '≤',
+ eq: '='
+ },
+ targetingSummaryAll: 'All users',
+ targetingSummaryCustom: 'Custom ({groups} groups)',
+ timeImmediate: 'Immediate',
+ timeNever: 'Never',
+ readStatus: 'Read Status',
+ eligible: 'Eligible',
+ readAt: 'Read at',
+ unread: 'Unread',
+ searchUsers: 'Search users...',
+ failedToLoad: 'Failed to load announcements',
+ failedToCreate: 'Failed to create announcement',
+ failedToUpdate: 'Failed to update announcement',
+ failedToDelete: 'Failed to delete announcement',
+ failedToLoadReadStatus: 'Failed to load read status',
+ deleteConfirm: 'Are you sure you want to delete this announcement? This action cannot be undone.'
+ },
+
// Promo Codes
promo: {
title: 'Promo Code Management',
@@ -3063,6 +3131,21 @@ export default {
'The administrator enabled the entry but has not configured a purchase URL. Please contact admin.'
},
+ // Announcements Page
+ announcements: {
+ title: 'Announcements',
+ description: 'View system announcements',
+ unreadOnly: 'Show unread only',
+ markRead: 'Mark as read',
+ readAt: 'Read at',
+ read: 'Read',
+ unread: 'Unread',
+ startsAt: 'Starts at',
+ endsAt: 'Ends at',
+ empty: 'No announcements',
+ emptyUnread: 'No unread announcements'
+ },
+
// User Subscriptions Page
userSubscriptions: {
title: 'My Subscriptions',
diff --git a/frontend/src/i18n/locales/zh.ts b/frontend/src/i18n/locales/zh.ts
index 4b6a9be6..0b456624 100644
--- a/frontend/src/i18n/locales/zh.ts
+++ b/frontend/src/i18n/locales/zh.ts
@@ -182,6 +182,7 @@ export default {
// Navigation
nav: {
dashboard: '仪表盘',
+ announcements: '公告',
apiKeys: 'API 密钥',
usage: '使用记录',
redeem: '兑换',
@@ -2098,6 +2099,73 @@ export default {
failedToDelete: '删除兑换码失败'
},
+ // Announcements
+ announcements: {
+ title: '公告管理',
+ description: '创建公告并按条件投放',
+ createAnnouncement: '创建公告',
+ editAnnouncement: '编辑公告',
+ deleteAnnouncement: '删除公告',
+ searchAnnouncements: '搜索公告...',
+ status: '状态',
+ allStatus: '全部状态',
+ columns: {
+ title: '标题',
+ status: '状态',
+ targeting: '展示条件',
+ timeRange: '有效期',
+ createdAt: '创建时间',
+ actions: '操作'
+ },
+ statusLabels: {
+ draft: '草稿',
+ active: '展示中',
+ archived: '已归档'
+ },
+ form: {
+ title: '标题',
+ content: '内容(支持 Markdown)',
+ status: '状态',
+ startsAt: '开始时间',
+ endsAt: '结束时间',
+ startsAtHint: '留空表示立即生效',
+ endsAtHint: '留空表示永久生效',
+ targetingMode: '展示条件',
+ targetingAll: '所有用户',
+ targetingCustom: '按条件',
+ addOrGroup: '添加 OR 条件组',
+ addAndCondition: '添加 AND 条件',
+ conditionType: '条件类型',
+ conditionSubscription: '订阅套餐',
+ conditionBalance: '余额',
+ operator: '运算符',
+ balanceValue: '余额阈值',
+ selectPackages: '选择套餐'
+ },
+ operators: {
+ gt: '>',
+ gte: '≥',
+ lt: '<',
+ lte: '≤',
+ eq: '='
+ },
+ targetingSummaryAll: '全部用户',
+ targetingSummaryCustom: '自定义({groups} 组)',
+ timeImmediate: '立即',
+ timeNever: '永久',
+ readStatus: '已读情况',
+ eligible: '符合条件',
+ readAt: '已读时间',
+ unread: '未读',
+ searchUsers: '搜索用户...',
+ failedToLoad: '加载公告失败',
+ failedToCreate: '创建公告失败',
+ failedToUpdate: '更新公告失败',
+ failedToDelete: '删除公告失败',
+ failedToLoadReadStatus: '加载已读情况失败',
+ deleteConfirm: '确定要删除该公告吗?此操作无法撤销。'
+ },
+
// Promo Codes
promo: {
title: '优惠码管理',
@@ -3212,6 +3280,21 @@ export default {
notConfiguredDesc: '管理员已开启入口,但尚未配置购买订阅链接,请联系管理员。'
},
+ // Announcements Page
+ announcements: {
+ title: '公告',
+ description: '查看系统公告',
+ unreadOnly: '仅显示未读',
+ markRead: '标记已读',
+ readAt: '已读时间',
+ read: '已读',
+ unread: '未读',
+ startsAt: '开始时间',
+ endsAt: '结束时间',
+ empty: '暂无公告',
+ emptyUnread: '暂无未读公告'
+ },
+
// User Subscriptions Page
userSubscriptions: {
title: '我的订阅',
diff --git a/frontend/src/router/index.ts b/frontend/src/router/index.ts
index a8ddc67f..4e2c1147 100644
--- a/frontend/src/router/index.ts
+++ b/frontend/src/router/index.ts
@@ -187,6 +187,18 @@ const routes: RouteRecordRaw[] = [
descriptionKey: 'purchase.description'
}
},
+ {
+ path: '/announcements',
+ name: 'Announcements',
+ component: () => import('@/views/user/AnnouncementsView.vue'),
+ meta: {
+ requiresAuth: true,
+ requiresAdmin: false,
+ title: 'Announcements',
+ titleKey: 'announcements.title',
+ descriptionKey: 'announcements.description'
+ }
+ },
// ==================== Admin Routes ====================
{
@@ -265,6 +277,18 @@ const routes: RouteRecordRaw[] = [
descriptionKey: 'admin.accounts.description'
}
},
+ {
+ path: '/admin/announcements',
+ name: 'AdminAnnouncements',
+ component: () => import('@/views/admin/AnnouncementsView.vue'),
+ meta: {
+ requiresAuth: true,
+ requiresAdmin: true,
+ title: 'Announcements',
+ titleKey: 'admin.announcements.title',
+ descriptionKey: 'admin.announcements.description'
+ }
+ },
{
path: '/admin/proxies',
name: 'AdminProxies',
diff --git a/frontend/src/types/index.ts b/frontend/src/types/index.ts
index 6f3b972e..9802d5c8 100644
--- a/frontend/src/types/index.ts
+++ b/frontend/src/types/index.ts
@@ -129,6 +129,81 @@ export interface UpdateSubscriptionRequest {
is_active?: boolean
}
+// ==================== Announcement Types ====================
+
+export type AnnouncementStatus = 'draft' | 'active' | 'archived'
+
+export type AnnouncementConditionType = 'subscription' | 'balance'
+
+export type AnnouncementOperator = 'in' | 'gt' | 'gte' | 'lt' | 'lte' | 'eq'
+
+export interface AnnouncementCondition {
+ type: AnnouncementConditionType
+ operator: AnnouncementOperator
+ group_ids?: number[]
+ value?: number
+}
+
+export interface AnnouncementConditionGroup {
+ all_of?: AnnouncementCondition[]
+}
+
+export interface AnnouncementTargeting {
+ any_of?: AnnouncementConditionGroup[]
+}
+
+export interface Announcement {
+ id: number
+ title: string
+ content: string
+ status: AnnouncementStatus
+ targeting: AnnouncementTargeting
+ starts_at?: string
+ ends_at?: string
+ created_by?: number
+ updated_by?: number
+ created_at: string
+ updated_at: string
+}
+
+export interface UserAnnouncement {
+ id: number
+ title: string
+ content: string
+ starts_at?: string
+ ends_at?: string
+ read_at?: string
+ created_at: string
+ updated_at: string
+}
+
+export interface CreateAnnouncementRequest {
+ title: string
+ content: string
+ status?: AnnouncementStatus
+ targeting: AnnouncementTargeting
+ starts_at?: number
+ ends_at?: number
+}
+
+export interface UpdateAnnouncementRequest {
+ title?: string
+ content?: string
+ status?: AnnouncementStatus
+ targeting?: AnnouncementTargeting
+ starts_at?: number
+ ends_at?: number
+}
+
+export interface AnnouncementUserReadStatus {
+ user_id: number
+ email: string
+ username: string
+ balance: number
+ eligible: boolean
+ read_at?: string
+}
+
// ==================== Proxy Node Types ====================
export interface ProxyNode {
diff --git a/frontend/src/views/admin/AnnouncementsView.vue b/frontend/src/views/admin/AnnouncementsView.vue
new file mode 100644
index 00000000..38574454
--- /dev/null
+++ b/frontend/src/views/admin/AnnouncementsView.vue
@@ -0,0 +1,538 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {{ value }}
+
+
+ #{{ row.id }}
+ ·
+ {{ formatDateTime(row.created_at) }}
+
+
+
+
+
+
+ {{ statusLabel(value) }}
+
+
+
+
+
+ {{ targetingSummary(row.targeting) }}
+
+
+
+
+
+
+ {{ t('admin.announcements.form.startsAt') }}:
+ {{ row.starts_at ? formatDateTime(row.starts_at) : t('admin.announcements.timeImmediate') }}
+
+
+ {{ t('admin.announcements.form.endsAt') }}:
+ {{ row.ends_at ? formatDateTime(row.ends_at) : t('admin.announcements.timeNever') }}
+
+
+
+
+
+ {{ formatDateTime(value) }}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/frontend/src/views/user/AnnouncementsView.vue b/frontend/src/views/user/AnnouncementsView.vue
new file mode 100644
index 00000000..99ea253e
--- /dev/null
+++ b/frontend/src/views/user/AnnouncementsView.vue
@@ -0,0 +1,140 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {{ unreadOnly ? t('announcements.emptyUnread') : t('announcements.empty') }}
+
+
+
+
+
+
+
+
+ {{ item.title }}
+
+
+ {{ t('announcements.unread') }}
+
+
+ {{ t('announcements.read') }}
+
+
+
+ {{ formatDateTime(item.created_at) }}
+
+ {{ t('announcements.startsAt') }}: {{ formatDateTime(item.starts_at) }}
+
+
+ {{ t('announcements.endsAt') }}: {{ formatDateTime(item.ends_at) }}
+
+
+
+
+
+
+
+ {{ t('announcements.readAt') }}: {{ formatDateTime(item.read_at) }}
+
+
+
+
+
+ {{ item.content }}
+
+
+
+
+
+
+
+
+
From 9bee0a20713322adca3c7a0bf3d53b9b338cba74 Mon Sep 17 00:00:00 2001
From: ducky
Date: Fri, 30 Jan 2026 17:28:53 +0800
Subject: [PATCH 61/99] chore: gofmt for golangci-lint
---
backend/ent/schema/announcement.go | 1 -
backend/ent/schema/announcement_read.go | 1 -
.../internal/handler/admin/announcement_handler.go | 11 +++++------
backend/internal/handler/announcement_handler.go | 1 -
backend/internal/handler/dto/announcement.go | 1 -
backend/internal/repository/announcement_read_repo.go | 1 -
backend/internal/repository/announcement_repo.go | 1 -
.../internal/service/announcement_targeting_test.go | 1 -
8 files changed, 5 insertions(+), 13 deletions(-)
diff --git a/backend/ent/schema/announcement.go b/backend/ent/schema/announcement.go
index 3b534831..1568778f 100644
--- a/backend/ent/schema/announcement.go
+++ b/backend/ent/schema/announcement.go
@@ -88,4 +88,3 @@ func (Announcement) Indexes() []ent.Index {
index.Fields("ends_at"),
}
}
-
diff --git a/backend/ent/schema/announcement_read.go b/backend/ent/schema/announcement_read.go
index 2f80d8b2..e0b50777 100644
--- a/backend/ent/schema/announcement_read.go
+++ b/backend/ent/schema/announcement_read.go
@@ -63,4 +63,3 @@ func (AnnouncementRead) Indexes() []ent.Index {
index.Fields("announcement_id", "user_id").Unique(),
}
}
-
diff --git a/backend/internal/handler/admin/announcement_handler.go b/backend/internal/handler/admin/announcement_handler.go
index a4e9f2f0..0b5d0fbc 100644
--- a/backend/internal/handler/admin/announcement_handler.go
+++ b/backend/internal/handler/admin/announcement_handler.go
@@ -27,12 +27,12 @@ func NewAnnouncementHandler(announcementService *service.AnnouncementService) *A
}
type CreateAnnouncementRequest struct {
- Title string `json:"title" binding:"required"`
- Content string `json:"content" binding:"required"`
- Status string `json:"status" binding:"omitempty,oneof=draft active archived"`
+ Title string `json:"title" binding:"required"`
+ Content string `json:"content" binding:"required"`
+ Status string `json:"status" binding:"omitempty,oneof=draft active archived"`
Targeting service.AnnouncementTargeting `json:"targeting"`
- StartsAt *int64 `json:"starts_at"` // Unix seconds, 0/empty = immediate
- EndsAt *int64 `json:"ends_at"` // Unix seconds, 0/empty = never
+ StartsAt *int64 `json:"starts_at"` // Unix seconds, 0/empty = immediate
+ EndsAt *int64 `json:"ends_at"` // Unix seconds, 0/empty = never
}
type UpdateAnnouncementRequest struct {
@@ -244,4 +244,3 @@ func (h *AnnouncementHandler) ListReadStatus(c *gin.Context) {
response.Paginated(c, items, paginationResult.Total, page, pageSize)
}
-
diff --git a/backend/internal/handler/announcement_handler.go b/backend/internal/handler/announcement_handler.go
index 1e1424eb..72823eaf 100644
--- a/backend/internal/handler/announcement_handler.go
+++ b/backend/internal/handler/announcement_handler.go
@@ -79,4 +79,3 @@ func parseBoolQuery(v string) bool {
return false
}
}
-
diff --git a/backend/internal/handler/dto/announcement.go b/backend/internal/handler/dto/announcement.go
index ec2a8ca7..bc0db1b2 100644
--- a/backend/internal/handler/dto/announcement.go
+++ b/backend/internal/handler/dto/announcement.go
@@ -72,4 +72,3 @@ func UserAnnouncementFromService(a *service.UserAnnouncement) *UserAnnouncement
UpdatedAt: a.Announcement.UpdatedAt,
}
}
-
diff --git a/backend/internal/repository/announcement_read_repo.go b/backend/internal/repository/announcement_read_repo.go
index 1c6b480a..2dc346b1 100644
--- a/backend/internal/repository/announcement_read_repo.go
+++ b/backend/internal/repository/announcement_read_repo.go
@@ -81,4 +81,3 @@ func (r *announcementReadRepository) CountByAnnouncementID(ctx context.Context,
}
return int64(count), nil
}
-
diff --git a/backend/internal/repository/announcement_repo.go b/backend/internal/repository/announcement_repo.go
index edeb82e6..52029e4e 100644
--- a/backend/internal/repository/announcement_repo.go
+++ b/backend/internal/repository/announcement_repo.go
@@ -192,4 +192,3 @@ func announcementEntitiesToService(models []*dbent.Announcement) []service.Annou
}
return out
}
-
diff --git a/backend/internal/service/announcement_targeting_test.go b/backend/internal/service/announcement_targeting_test.go
index fffea26b..4d904c7d 100644
--- a/backend/internal/service/announcement_targeting_test.go
+++ b/backend/internal/service/announcement_targeting_test.go
@@ -64,4 +64,3 @@ func TestAnnouncementTargeting_Matches_AndOrSemantics(t *testing.T) {
require.False(t, targeting.Matches(99.9, map[int64]struct{}{10: {}}))
require.True(t, targeting.Matches(100, map[int64]struct{}{10: {}}))
}
-
From fe17058700a2664597ef02b27e6995e35a7804bc Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Sat, 31 Jan 2026 01:40:38 +0800
Subject: [PATCH 62/99] refactor: limit OpenCode keyword replacement to tool
descriptions
---
.../internal/service/gateway_sanitize_test.go | 6 ++---
backend/internal/service/gateway_service.go | 22 +++++++++++++++----
2 files changed, 21 insertions(+), 7 deletions(-)
diff --git a/backend/internal/service/gateway_sanitize_test.go b/backend/internal/service/gateway_sanitize_test.go
index 3b0a07c9..a70c1a00 100644
--- a/backend/internal/service/gateway_sanitize_test.go
+++ b/backend/internal/service/gateway_sanitize_test.go
@@ -9,12 +9,12 @@ import (
func TestSanitizeOpenCodeText_RewritesCanonicalSentence(t *testing.T) {
in := "You are OpenCode, the best coding agent on the planet."
- got := sanitizeOpenCodeText(in)
+ got := sanitizeSystemText(in)
require.Equal(t, strings.TrimSpace(claudeCodeSystemPrompt), got)
}
-func TestSanitizeOpenCodeText_RewritesOpenCodeKeywords(t *testing.T) {
+func TestSanitizeToolText_RewritesOpenCodeKeywords(t *testing.T) {
in := "OpenCode and opencode are mentioned."
- got := sanitizeOpenCodeText(in)
+ got := sanitizeToolText(in)
require.Equal(t, "Claude Code and Claude are mentioned.", got)
}
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index 47ea8593..40354e48 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -697,7 +697,10 @@ func normalizeParamNameForOpenCode(name string, cache map[string]string) string
return name
}
-func sanitizeOpenCodeText(text string) string {
+// sanitizeSystemText rewrites only the fixed OpenCode identity sentence (if present).
+// We intentionally avoid broad keyword replacement in system prompts to prevent
+// accidentally changing user-provided instructions.
+func sanitizeSystemText(text string) string {
if text == "" {
return text
}
@@ -709,6 +712,17 @@ func sanitizeOpenCodeText(text string) string {
"You are OpenCode, the best coding agent on the planet.",
strings.TrimSpace(claudeCodeSystemPrompt),
)
+ return text
+}
+
+// sanitizeToolText is intentionally more aggressive than sanitizeSystemText because
+// tool descriptions are not user chat content, and some upstreams may flag "opencode"
+// strings as non-Claude-Code fingerprints.
+func sanitizeToolText(text string) string {
+ if text == "" {
+ return text
+ }
+ text = sanitizeSystemText(text)
text = strings.ReplaceAll(text, "OpenCode", "Claude Code")
text = opencodeTextRe.ReplaceAllString(text, "Claude")
return text
@@ -720,7 +734,7 @@ func sanitizeToolDescription(description string) string {
}
description = toolDescAbsPathRe.ReplaceAllString(description, "[path]")
description = toolDescWinPathRe.ReplaceAllString(description, "[path]")
- return sanitizeOpenCodeText(description)
+ return sanitizeToolText(description)
}
func normalizeToolInputSchema(inputSchema any, cache map[string]string) {
@@ -795,7 +809,7 @@ func normalizeClaudeOAuthRequestBody(body []byte, modelID string, opts claudeOAu
if system, ok := req["system"]; ok {
switch v := system.(type) {
case string:
- sanitized := sanitizeOpenCodeText(v)
+ sanitized := sanitizeSystemText(v)
if sanitized != v {
req["system"] = sanitized
}
@@ -812,7 +826,7 @@ func normalizeClaudeOAuthRequestBody(body []byte, modelID string, opts claudeOAu
if !ok || text == "" {
continue
}
- sanitized := sanitizeOpenCodeText(text)
+ sanitized := sanitizeSystemText(text)
if sanitized != text {
block["text"] = sanitized
}
From 3a34746668f22bd7a96d8c29a84aeb2a08f88bef Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Sat, 31 Jan 2026 02:01:51 +0800
Subject: [PATCH 63/99] refactor: stop rewriting tool descriptions; keep only
system sentence rewrite
---
.../internal/service/gateway_sanitize_test.go | 7 ++++---
backend/internal/service/gateway_service.go | 17 +++--------------
2 files changed, 7 insertions(+), 17 deletions(-)
diff --git a/backend/internal/service/gateway_sanitize_test.go b/backend/internal/service/gateway_sanitize_test.go
index a70c1a00..8fa971ca 100644
--- a/backend/internal/service/gateway_sanitize_test.go
+++ b/backend/internal/service/gateway_sanitize_test.go
@@ -13,8 +13,9 @@ func TestSanitizeOpenCodeText_RewritesCanonicalSentence(t *testing.T) {
require.Equal(t, strings.TrimSpace(claudeCodeSystemPrompt), got)
}
-func TestSanitizeToolText_RewritesOpenCodeKeywords(t *testing.T) {
+func TestSanitizeToolDescription_DoesNotRewriteKeywords(t *testing.T) {
in := "OpenCode and opencode are mentioned."
- got := sanitizeToolText(in)
- require.Equal(t, "Claude Code and Claude are mentioned.", got)
+ got := sanitizeToolDescription(in)
+ // We no longer rewrite tool descriptions; only redact obvious path leaks.
+ require.Equal(t, in, got)
}
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index 40354e48..703804a4 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -715,26 +715,15 @@ func sanitizeSystemText(text string) string {
return text
}
-// sanitizeToolText is intentionally more aggressive than sanitizeSystemText because
-// tool descriptions are not user chat content, and some upstreams may flag "opencode"
-// strings as non-Claude-Code fingerprints.
-func sanitizeToolText(text string) string {
- if text == "" {
- return text
- }
- text = sanitizeSystemText(text)
- text = strings.ReplaceAll(text, "OpenCode", "Claude Code")
- text = opencodeTextRe.ReplaceAllString(text, "Claude")
- return text
-}
-
func sanitizeToolDescription(description string) string {
if description == "" {
return description
}
description = toolDescAbsPathRe.ReplaceAllString(description, "[path]")
description = toolDescWinPathRe.ReplaceAllString(description, "[path]")
- return sanitizeToolText(description)
+ // Intentionally do NOT rewrite tool descriptions (OpenCode/Claude strings).
+ // Tool names/skill names may rely on exact wording, and rewriting can be misleading.
+ return description
}
func normalizeToolInputSchema(inputSchema any, cache map[string]string) {
From adb77af1d973ea5dcf234a0776187ef888a3514b Mon Sep 17 00:00:00 2001
From: cyhhao
Date: Sat, 31 Jan 2026 02:07:57 +0800
Subject: [PATCH 64/99] fix: satisfy golangci-lint (nil checks, remove unused
helpers)
---
backend/internal/service/gateway_service.go | 35 +++++++++------------
1 file changed, 15 insertions(+), 20 deletions(-)
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index 703804a4..3d39e37c 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -215,7 +215,6 @@ var (
modelFieldRe = regexp.MustCompile(`"model"\s*:\s*"([^"]+)"`)
toolDescAbsPathRe = regexp.MustCompile(`/\/?(?:home|Users|tmp|var|opt|usr|etc)\/[^\s,\)"'\]]+`)
toolDescWinPathRe = regexp.MustCompile(`(?i)[A-Z]:\\[^\s,\)"'\]]+`)
- opencodeTextRe = regexp.MustCompile(`(?i)opencode`)
claudeToolNameOverrides = map[string]string{
"bash": "Bash",
@@ -3320,11 +3319,16 @@ func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Contex
}
}
+ clientHeaders := http.Header{}
+ if c != nil && c.Request != nil {
+ clientHeaders = c.Request.Header
+ }
+
// OAuth账号:应用统一指纹
var fingerprint *Fingerprint
if account.IsOAuth() && s.identityService != nil {
// 1. 获取或创建指纹(包含随机生成的ClientID)
- fp, err := s.identityService.GetOrCreateFingerprint(ctx, account.ID, c.Request.Header)
+ fp, err := s.identityService.GetOrCreateFingerprint(ctx, account.ID, clientHeaders)
if err != nil {
log.Printf("Warning: failed to get fingerprint for account %d: %v", account.ID, err)
// 失败时降级为透传原始headers
@@ -3355,7 +3359,7 @@ func (s *GatewayService) buildUpstreamRequest(ctx context.Context, c *gin.Contex
}
// 白名单透传headers
- for key, values := range c.Request.Header {
+ for key, values := range clientHeaders {
lowerKey := strings.ToLower(key)
if allowedHeaders[lowerKey] {
for _, v := range values {
@@ -3479,20 +3483,6 @@ func requestNeedsBetaFeatures(body []byte) bool {
return false
}
-func requestHasTools(body []byte) bool {
- tools := gjson.GetBytes(body, "tools")
- if !tools.Exists() {
- return false
- }
- if tools.IsArray() {
- return len(tools.Array()) > 0
- }
- if tools.IsObject() {
- return len(tools.Map()) > 0
- }
- return false
-}
-
func defaultAPIKeyBetaHeader(body []byte) string {
modelID := gjson.GetBytes(body, "model").String()
if strings.Contains(strings.ToLower(modelID), "haiku") {
@@ -4804,10 +4794,15 @@ func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Con
}
}
+ clientHeaders := http.Header{}
+ if c != nil && c.Request != nil {
+ clientHeaders = c.Request.Header
+ }
+
// OAuth 账号:应用统一指纹和重写 userID
// 如果启用了会话ID伪装,会在重写后替换 session 部分为固定值
if account.IsOAuth() && s.identityService != nil {
- fp, err := s.identityService.GetOrCreateFingerprint(ctx, account.ID, c.Request.Header)
+ fp, err := s.identityService.GetOrCreateFingerprint(ctx, account.ID, clientHeaders)
if err == nil {
accountUUID := account.GetExtraString("account_uuid")
if accountUUID != "" && fp.ClientID != "" {
@@ -4831,7 +4826,7 @@ func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Con
}
// 白名单透传 headers
- for key, values := range c.Request.Header {
+ for key, values := range clientHeaders {
lowerKey := strings.ToLower(key)
if allowedHeaders[lowerKey] {
for _, v := range values {
@@ -4842,7 +4837,7 @@ func (s *GatewayService) buildCountTokensRequest(ctx context.Context, c *gin.Con
// OAuth 账号:应用指纹到请求头
if account.IsOAuth() && s.identityService != nil {
- fp, _ := s.identityService.GetOrCreateFingerprint(ctx, account.ID, c.Request.Header)
+ fp, _ := s.identityService.GetOrCreateFingerprint(ctx, account.ID, clientHeaders)
if fp != nil {
s.identityService.ApplyFingerprint(req, fp)
}
From f2e206700ce3754eeb56a8c6310dd69afc83152a Mon Sep 17 00:00:00 2001
From: iBenzene
Date: Sat, 31 Jan 2026 00:53:39 +0800
Subject: [PATCH 65/99] feat: add support for using TLS to connect to Redis
---
backend/internal/config/config.go | 3 ++
backend/internal/repository/redis.go | 12 +++++++-
backend/internal/repository/redis_test.go | 12 ++++++++
backend/internal/setup/cli.go | 3 ++
backend/internal/setup/handler.go | 18 ++++++-----
backend/internal/setup/setup.go | 32 ++++++++++++++------
config.yaml | 3 ++
deploy/.env.example | 1 +
deploy/config.example.yaml | 3 ++
deploy/docker-compose.standalone.yml | 1 +
deploy/docker-compose.yml | 1 +
frontend/src/api/setup.ts | 1 +
frontend/src/i18n/locales/en.ts | 4 ++-
frontend/src/i18n/locales/zh.ts | 4 ++-
frontend/src/views/setup/SetupWizardView.vue | 15 ++++++++-
15 files changed, 91 insertions(+), 22 deletions(-)
diff --git a/backend/internal/config/config.go b/backend/internal/config/config.go
index 477cb59d..84be445b 100644
--- a/backend/internal/config/config.go
+++ b/backend/internal/config/config.go
@@ -415,6 +415,8 @@ type RedisConfig struct {
PoolSize int `mapstructure:"pool_size"`
// MinIdleConns: 最小空闲连接数,保持热连接减少冷启动延迟
MinIdleConns int `mapstructure:"min_idle_conns"`
+ // EnableTLS: 是否启用 TLS/SSL 连接
+ EnableTLS bool `mapstructure:"enable_tls"`
}
func (r *RedisConfig) Address() string {
@@ -762,6 +764,7 @@ func setDefaults() {
viper.SetDefault("redis.write_timeout_seconds", 3)
viper.SetDefault("redis.pool_size", 128)
viper.SetDefault("redis.min_idle_conns", 10)
+ viper.SetDefault("redis.enable_tls", false)
// Ops (vNext)
viper.SetDefault("ops.enabled", true)
diff --git a/backend/internal/repository/redis.go b/backend/internal/repository/redis.go
index f3606ad9..ee6b2a59 100644
--- a/backend/internal/repository/redis.go
+++ b/backend/internal/repository/redis.go
@@ -1,6 +1,7 @@
package repository
import (
+ "crypto/tls"
"time"
"github.com/Wei-Shaw/sub2api/internal/config"
@@ -26,7 +27,7 @@ func InitRedis(cfg *config.Config) *redis.Client {
// buildRedisOptions 构建 Redis 连接选项
// 从配置文件读取连接池和超时参数,支持生产环境调优
func buildRedisOptions(cfg *config.Config) *redis.Options {
- return &redis.Options{
+ opts := &redis.Options{
Addr: cfg.Redis.Address(),
Password: cfg.Redis.Password,
DB: cfg.Redis.DB,
@@ -36,4 +37,13 @@ func buildRedisOptions(cfg *config.Config) *redis.Options {
PoolSize: cfg.Redis.PoolSize, // 连接池大小
MinIdleConns: cfg.Redis.MinIdleConns, // 最小空闲连接
}
+
+ if cfg.Redis.EnableTLS {
+ opts.TLSConfig = &tls.Config{
+ MinVersion: tls.VersionTLS12,
+ ServerName: cfg.Redis.Host,
+ }
+ }
+
+ return opts
}
diff --git a/backend/internal/repository/redis_test.go b/backend/internal/repository/redis_test.go
index 756a63dc..7cb31002 100644
--- a/backend/internal/repository/redis_test.go
+++ b/backend/internal/repository/redis_test.go
@@ -32,4 +32,16 @@ func TestBuildRedisOptions(t *testing.T) {
require.Equal(t, 4*time.Second, opts.WriteTimeout)
require.Equal(t, 100, opts.PoolSize)
require.Equal(t, 10, opts.MinIdleConns)
+ require.Nil(t, opts.TLSConfig)
+
+ // Test case with TLS enabled
+ cfgTLS := &config.Config{
+ Redis: config.RedisConfig{
+ Host: "localhost",
+ EnableTLS: true,
+ },
+ }
+ optsTLS := buildRedisOptions(cfgTLS)
+ require.NotNil(t, optsTLS.TLSConfig)
+ require.Equal(t, "localhost", optsTLS.TLSConfig.ServerName)
}
diff --git a/backend/internal/setup/cli.go b/backend/internal/setup/cli.go
index 03ac3f66..2b323acf 100644
--- a/backend/internal/setup/cli.go
+++ b/backend/internal/setup/cli.go
@@ -149,6 +149,8 @@ func RunCLI() error {
fmt.Println(" Invalid Redis DB. Must be between 0 and 15.")
}
+ cfg.Redis.EnableTLS = promptConfirm(reader, "Enable Redis TLS?")
+
fmt.Println()
fmt.Print("Testing Redis connection... ")
if err := TestRedisConnection(&cfg.Redis); err != nil {
@@ -205,6 +207,7 @@ func RunCLI() error {
fmt.Println("── Configuration Summary ──")
fmt.Printf("Database: %s@%s:%d/%s\n", cfg.Database.User, cfg.Database.Host, cfg.Database.Port, cfg.Database.DBName)
fmt.Printf("Redis: %s:%d\n", cfg.Redis.Host, cfg.Redis.Port)
+ fmt.Printf("Redis TLS: %s\n", map[bool]string{true: "enabled", false: "disabled"}[cfg.Redis.EnableTLS])
fmt.Printf("Admin: %s\n", cfg.Admin.Email)
fmt.Printf("Server: :%d\n", cfg.Server.Port)
fmt.Println()
diff --git a/backend/internal/setup/handler.go b/backend/internal/setup/handler.go
index 1c613dfd..1531c97b 100644
--- a/backend/internal/setup/handler.go
+++ b/backend/internal/setup/handler.go
@@ -176,10 +176,11 @@ func testDatabase(c *gin.Context) {
// TestRedisRequest represents Redis test request
type TestRedisRequest struct {
- Host string `json:"host" binding:"required"`
- Port int `json:"port" binding:"required"`
- Password string `json:"password"`
- DB int `json:"db"`
+ Host string `json:"host" binding:"required"`
+ Port int `json:"port" binding:"required"`
+ Password string `json:"password"`
+ DB int `json:"db"`
+ EnableTLS bool `json:"enable_tls"`
}
// testRedis tests Redis connection
@@ -205,10 +206,11 @@ func testRedis(c *gin.Context) {
}
cfg := &RedisConfig{
- Host: req.Host,
- Port: req.Port,
- Password: req.Password,
- DB: req.DB,
+ Host: req.Host,
+ Port: req.Port,
+ Password: req.Password,
+ DB: req.DB,
+ EnableTLS: req.EnableTLS,
}
if err := TestRedisConnection(cfg); err != nil {
diff --git a/backend/internal/setup/setup.go b/backend/internal/setup/setup.go
index 65118161..f81f75cf 100644
--- a/backend/internal/setup/setup.go
+++ b/backend/internal/setup/setup.go
@@ -3,6 +3,7 @@ package setup
import (
"context"
"crypto/rand"
+ "crypto/tls"
"database/sql"
"encoding/hex"
"fmt"
@@ -79,10 +80,11 @@ type DatabaseConfig struct {
}
type RedisConfig struct {
- Host string `json:"host" yaml:"host"`
- Port int `json:"port" yaml:"port"`
- Password string `json:"password" yaml:"password"`
- DB int `json:"db" yaml:"db"`
+ Host string `json:"host" yaml:"host"`
+ Port int `json:"port" yaml:"port"`
+ Password string `json:"password" yaml:"password"`
+ DB int `json:"db" yaml:"db"`
+ EnableTLS bool `json:"enable_tls" yaml:"enable_tls"`
}
type AdminConfig struct {
@@ -199,11 +201,20 @@ func TestDatabaseConnection(cfg *DatabaseConfig) error {
// TestRedisConnection tests the Redis connection
func TestRedisConnection(cfg *RedisConfig) error {
- rdb := redis.NewClient(&redis.Options{
+ opts := &redis.Options{
Addr: fmt.Sprintf("%s:%d", cfg.Host, cfg.Port),
Password: cfg.Password,
DB: cfg.DB,
- })
+ }
+
+ if cfg.EnableTLS {
+ opts.TLSConfig = &tls.Config{
+ MinVersion: tls.VersionTLS12,
+ ServerName: cfg.Host,
+ }
+ }
+
+ rdb := redis.NewClient(opts)
defer func() {
if err := rdb.Close(); err != nil {
log.Printf("failed to close redis client: %v", err)
@@ -485,10 +496,11 @@ func AutoSetupFromEnv() error {
SSLMode: getEnvOrDefault("DATABASE_SSLMODE", "disable"),
},
Redis: RedisConfig{
- Host: getEnvOrDefault("REDIS_HOST", "localhost"),
- Port: getEnvIntOrDefault("REDIS_PORT", 6379),
- Password: getEnvOrDefault("REDIS_PASSWORD", ""),
- DB: getEnvIntOrDefault("REDIS_DB", 0),
+ Host: getEnvOrDefault("REDIS_HOST", "localhost"),
+ Port: getEnvIntOrDefault("REDIS_PORT", 6379),
+ Password: getEnvOrDefault("REDIS_PASSWORD", ""),
+ DB: getEnvIntOrDefault("REDIS_DB", 0),
+ EnableTLS: getEnvOrDefault("REDIS_ENABLE_TLS", "false") == "true",
},
Admin: AdminConfig{
Email: getEnvOrDefault("ADMIN_EMAIL", "admin@sub2api.local"),
diff --git a/config.yaml b/config.yaml
index 5e7513fb..19f77221 100644
--- a/config.yaml
+++ b/config.yaml
@@ -322,6 +322,9 @@ redis:
# Database number (0-15)
# 数据库编号(0-15)
db: 0
+ # Enable TLS/SSL connection
+ # 是否启用 TLS/SSL 连接
+ enable_tls: false
# =============================================================================
# Ops Monitoring (Optional)
diff --git a/deploy/.env.example b/deploy/.env.example
index 1e9395a0..25096c3d 100644
--- a/deploy/.env.example
+++ b/deploy/.env.example
@@ -40,6 +40,7 @@ POSTGRES_DB=sub2api
# Leave empty for no password (default for local development)
REDIS_PASSWORD=
REDIS_DB=0
+REDIS_ENABLE_TLS=false
# -----------------------------------------------------------------------------
# Admin Account
diff --git a/deploy/config.example.yaml b/deploy/config.example.yaml
index 98aba8f5..6f5e9744 100644
--- a/deploy/config.example.yaml
+++ b/deploy/config.example.yaml
@@ -376,6 +376,9 @@ redis:
# Database number (0-15)
# 数据库编号(0-15)
db: 0
+ # Enable TLS/SSL connection
+ # 是否启用 TLS/SSL 连接
+ enable_tls: false
# =============================================================================
# Ops Monitoring (Optional)
diff --git a/deploy/docker-compose.standalone.yml b/deploy/docker-compose.standalone.yml
index 1bf247c7..97903bc5 100644
--- a/deploy/docker-compose.standalone.yml
+++ b/deploy/docker-compose.standalone.yml
@@ -56,6 +56,7 @@ services:
- REDIS_PORT=${REDIS_PORT:-6379}
- REDIS_PASSWORD=${REDIS_PASSWORD:-}
- REDIS_DB=${REDIS_DB:-0}
+ - REDIS_ENABLE_TLS=${REDIS_ENABLE_TLS:-false}
# =======================================================================
# Admin Account (auto-created on first run)
diff --git a/deploy/docker-compose.yml b/deploy/docker-compose.yml
index ac6008d2..033731ac 100644
--- a/deploy/docker-compose.yml
+++ b/deploy/docker-compose.yml
@@ -62,6 +62,7 @@ services:
- REDIS_PORT=6379
- REDIS_PASSWORD=${REDIS_PASSWORD:-}
- REDIS_DB=${REDIS_DB:-0}
+ - REDIS_ENABLE_TLS=${REDIS_ENABLE_TLS:-false}
# =======================================================================
# Admin Account (auto-created on first run)
diff --git a/frontend/src/api/setup.ts b/frontend/src/api/setup.ts
index 8b744590..1097c95b 100644
--- a/frontend/src/api/setup.ts
+++ b/frontend/src/api/setup.ts
@@ -31,6 +31,7 @@ export interface RedisConfig {
port: number
password: string
db: number
+ enable_tls: boolean
}
export interface AdminConfig {
diff --git a/frontend/src/i18n/locales/en.ts b/frontend/src/i18n/locales/en.ts
index dc93d37c..64b589df 100644
--- a/frontend/src/i18n/locales/en.ts
+++ b/frontend/src/i18n/locales/en.ts
@@ -69,7 +69,9 @@ export default {
port: 'Port',
password: 'Password (optional)',
database: 'Database',
- passwordPlaceholder: 'Password'
+ passwordPlaceholder: 'Password',
+ enableTls: 'Enable TLS',
+ enableTlsHint: 'Use TLS when connecting to Redis (public CA certs)'
},
admin: {
title: 'Admin Account',
diff --git a/frontend/src/i18n/locales/zh.ts b/frontend/src/i18n/locales/zh.ts
index 4b6a9be6..19378915 100644
--- a/frontend/src/i18n/locales/zh.ts
+++ b/frontend/src/i18n/locales/zh.ts
@@ -66,7 +66,9 @@ export default {
port: '端口',
password: '密码(可选)',
database: '数据库',
- passwordPlaceholder: '密码'
+ passwordPlaceholder: '密码',
+ enableTls: '启用 TLS',
+ enableTlsHint: '连接 Redis 时使用 TLS(公共 CA 证书)'
},
admin: {
title: '管理员账户',
diff --git a/frontend/src/views/setup/SetupWizardView.vue b/frontend/src/views/setup/SetupWizardView.vue
index 2be837f5..00f437ba 100644
--- a/frontend/src/views/setup/SetupWizardView.vue
+++ b/frontend/src/views/setup/SetupWizardView.vue
@@ -91,6 +91,18 @@
+
+
+
+ {{ t("setup.redis.enableTls") }}
+
+
+ {{ t("setup.redis.enableTlsHint") }}
+
+
+
+
+
@@ -517,7 +529,8 @@ const formData = reactive
({
host: 'localhost',
port: 6379,
password: '',
- db: 0
+ db: 0,
+ enable_tls: false
},
admin: {
email: '',
From 35f39ca2912a8f7f269368669556fc5dcf8fd7b8 Mon Sep 17 00:00:00 2001
From: iBenzene
Date: Sat, 31 Jan 2026 19:06:19 +0800
Subject: [PATCH 66/99] =?UTF-8?q?chore:=20=E4=BF=AE=E5=A4=8D=E4=BA=86=20re?=
=?UTF-8?q?dis.go=20=E4=B8=AD=E4=BB=A3=E7=A0=81=E9=A3=8E=E6=A0=BC=EF=BC=88?=
=?UTF-8?q?golangci-lint=EF=BC=89=E7=9A=84=E9=97=AE=E9=A2=98?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
backend/internal/repository/redis.go | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/backend/internal/repository/redis.go b/backend/internal/repository/redis.go
index ee6b2a59..2b4ee4e6 100644
--- a/backend/internal/repository/redis.go
+++ b/backend/internal/repository/redis.go
@@ -40,8 +40,8 @@ func buildRedisOptions(cfg *config.Config) *redis.Options {
if cfg.Redis.EnableTLS {
opts.TLSConfig = &tls.Config{
- MinVersion: tls.VersionTLS12,
- ServerName: cfg.Redis.Host,
+ MinVersion: tls.VersionTLS12,
+ ServerName: cfg.Redis.Host,
}
}
From bbc7b4aeed6d33e6b6f7d42991edd949c9de8833 Mon Sep 17 00:00:00 2001
From: liuxiongfeng
Date: Sun, 1 Feb 2026 16:29:27 +0800
Subject: [PATCH 67/99] =?UTF-8?q?feat(gateway):=20Gemini=20API=20Key=20?=
=?UTF-8?q?=E8=B4=A6=E6=88=B7=E8=B7=B3=E8=BF=87=E6=A8=A1=E5=9E=8B=E6=98=A0?=
=?UTF-8?q?=E5=B0=84=E6=A3=80=E6=9F=A5=EF=BC=8C=E7=9B=B4=E6=8E=A5=E9=80=8F?=
=?UTF-8?q?=E4=BC=A0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Gemini API Key 账户通常代理上游服务,模型支持由上游判断,
本地不需要预先配置模型映射。
---
backend/internal/service/gateway_service.go | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index 2e3ba93e..7a901907 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -1893,6 +1893,10 @@ func (s *GatewayService) isModelSupportedByAccount(account *Account, requestedMo
// Antigravity 平台使用专门的模型支持检查
return IsAntigravityModelSupported(requestedModel)
}
+ // Gemini API Key 账户直接透传,由上游判断模型是否支持
+ if account.Platform == PlatformGemini && account.Type == AccountTypeAPIKey {
+ return true
+ }
// 其他平台使用账户的模型支持检查
return account.IsModelSupported(requestedModel)
}
From 4bfeeecb05a193719cec5d676e268f23a4ede1d0 Mon Sep 17 00:00:00 2001
From: liuxiongfeng
Date: Mon, 2 Feb 2026 12:50:18 +0800
Subject: [PATCH 68/99] =?UTF-8?q?fix(billing):=20=E4=BF=AE=E5=A4=8D=20Gemi?=
=?UTF-8?q?ni=20=E6=8E=A5=E5=8F=A3=E7=BC=93=E5=AD=98=20token=20=E7=BB=9F?=
=?UTF-8?q?=E8=AE=A1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
extractGeminiUsage 函数未提取 cachedContentTokenCount,
导致计费时缓存读取 token 始终为 0。
修复:
- 提取 usageMetadata.cachedContentTokenCount
- 设置 CacheReadInputTokens 字段
- InputTokens 减去缓存 token(与 response_transformer 逻辑一致)
---
.../internal/service/gemini_messages_compat_service.go | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/backend/internal/service/gemini_messages_compat_service.go b/backend/internal/service/gemini_messages_compat_service.go
index aea880c2..659cdf03 100644
--- a/backend/internal/service/gemini_messages_compat_service.go
+++ b/backend/internal/service/gemini_messages_compat_service.go
@@ -2522,9 +2522,13 @@ func extractGeminiUsage(geminiResp map[string]any) *ClaudeUsage {
}
prompt, _ := asInt(usageMeta["promptTokenCount"])
cand, _ := asInt(usageMeta["candidatesTokenCount"])
+ cached, _ := asInt(usageMeta["cachedContentTokenCount"])
+ // 注意:Gemini 的 promptTokenCount 包含 cachedContentTokenCount,
+ // 但 Claude 的 input_tokens 不包含 cache_read_input_tokens,需要减去
return &ClaudeUsage{
- InputTokens: prompt,
- OutputTokens: cand,
+ InputTokens: prompt - cached,
+ OutputTokens: cand,
+ CacheReadInputTokens: cached,
}
}
From bbdc8663d32ba4db75cf3255ba71b1b469ed22d3 Mon Sep 17 00:00:00 2001
From: shaw
Date: Mon, 2 Feb 2026 14:57:09 +0800
Subject: [PATCH 69/99] =?UTF-8?q?feat:=20=E9=87=8D=E6=96=B0=E8=AE=BE?=
=?UTF-8?q?=E8=AE=A1=E5=85=AC=E5=91=8A=E7=B3=BB=E7=BB=9F=E4=B8=BAHeader?=
=?UTF-8?q?=E9=93=83=E9=93=9B=E9=80=9A=E7=9F=A5?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
- 新增 AnnouncementBell 组件,支持 Modal 弹窗和 Markdown 渲染
- 移除 Dashboard 横幅和独立公告页面
- 铃铛位置在 Header 文档按钮左侧,显示未读红点
- 支持点击查看详情、标记已读、全部已读等操作
- 完善国际化,移除所有硬编码中文
- 修复 AnnouncementTargetingEditor watch 循环问题
---
frontend/package-lock.json | 7212 -----------------
frontend/package.json | 3 +
frontend/pnpm-lock.yaml | 17 +
.../AnnouncementTargetingEditor.vue | 32 +-
.../components/common/AnnouncementBell.vue | 626 ++
frontend/src/components/icons/Icon.vue | 3 +
frontend/src/components/layout/AppHeader.vue | 6 +-
frontend/src/components/layout/AppSidebar.vue | 2 -
frontend/src/i18n/locales/en.ts | 11 +-
frontend/src/i18n/locales/zh.ts | 11 +-
frontend/src/router/index.ts | 12 -
frontend/src/utils/format.ts | 19 +
frontend/src/views/user/AnnouncementsView.vue | 140 -
13 files changed, 719 insertions(+), 7375 deletions(-)
delete mode 100644 frontend/package-lock.json
create mode 100644 frontend/src/components/common/AnnouncementBell.vue
delete mode 100644 frontend/src/views/user/AnnouncementsView.vue
diff --git a/frontend/package-lock.json b/frontend/package-lock.json
deleted file mode 100644
index 5c43a6a8..00000000
--- a/frontend/package-lock.json
+++ /dev/null
@@ -1,7212 +0,0 @@
-{
- "name": "sub2api-frontend",
- "version": "1.0.0",
- "lockfileVersion": 3,
- "requires": true,
- "packages": {
- "": {
- "name": "sub2api-frontend",
- "version": "1.0.0",
- "dependencies": {
- "@lobehub/icons": "^4.0.2",
- "@vueuse/core": "^10.7.0",
- "axios": "^1.6.2",
- "chart.js": "^4.4.1",
- "driver.js": "^1.4.0",
- "file-saver": "^2.0.5",
- "pinia": "^2.1.7",
- "qrcode": "^1.5.4",
- "vue": "^3.4.0",
- "vue-chartjs": "^5.3.0",
- "vue-i18n": "^9.14.5",
- "vue-router": "^4.2.5",
- "xlsx": "^0.18.5"
- },
- "devDependencies": {
- "@types/file-saver": "^2.0.7",
- "@types/mdx": "^2.0.13",
- "@types/node": "^20.10.5",
- "@types/qrcode": "^1.5.6",
- "@typescript-eslint/eslint-plugin": "^7.18.0",
- "@typescript-eslint/parser": "^7.18.0",
- "@vitejs/plugin-vue": "^5.2.3",
- "@vitest/coverage-v8": "^2.1.9",
- "@vue/test-utils": "^2.4.6",
- "autoprefixer": "^10.4.16",
- "eslint": "^8.57.0",
- "eslint-plugin-vue": "^9.25.0",
- "jsdom": "^24.1.3",
- "postcss": "^8.4.32",
- "tailwindcss": "^3.4.0",
- "typescript": "~5.6.0",
- "vite": "^5.0.10",
- "vite-plugin-checker": "^0.9.1",
- "vitest": "^2.1.9",
- "vue-tsc": "^2.2.0"
- }
- },
- "node_modules/@alloc/quick-lru": {
- "version": "5.2.0",
- "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz",
- "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/@ampproject/remapping": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz",
- "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==",
- "dev": true,
- "license": "Apache-2.0",
- "dependencies": {
- "@jridgewell/gen-mapping": "^0.3.5",
- "@jridgewell/trace-mapping": "^0.3.24"
- },
- "engines": {
- "node": ">=6.0.0"
- }
- },
- "node_modules/@ant-design/cssinjs": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/@ant-design/cssinjs/-/cssinjs-2.0.2.tgz",
- "integrity": "sha512-7KDVIigtqlamOLtJ0hbjECX/sDGDaJXsM/KHala8I/1E4lpl9RAO585kbVvh/k1rIrFAV6JeGkXmdWyYj9XvuA==",
- "license": "MIT",
- "dependencies": {
- "@babel/runtime": "^7.11.1",
- "@emotion/hash": "^0.8.0",
- "@emotion/unitless": "^0.7.5",
- "@rc-component/util": "^1.4.0",
- "clsx": "^2.1.1",
- "csstype": "^3.1.3",
- "stylis": "^4.3.4"
- },
- "peerDependencies": {
- "react": ">=16.0.0",
- "react-dom": ">=16.0.0"
- }
- },
- "node_modules/@asamuzakjp/css-color": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-3.2.0.tgz",
- "integrity": "sha512-K1A6z8tS3XsmCMM86xoWdn7Fkdn9m6RSVtocUrJYIwZnFVkng/PvkEoWtOWmP+Scc6saYWHWZYbndEEXxl24jw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@csstools/css-calc": "^2.1.3",
- "@csstools/css-color-parser": "^3.0.9",
- "@csstools/css-parser-algorithms": "^3.0.4",
- "@csstools/css-tokenizer": "^3.0.3",
- "lru-cache": "^10.4.3"
- }
- },
- "node_modules/@babel/code-frame": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
- "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==",
- "license": "MIT",
- "dependencies": {
- "@babel/helper-validator-identifier": "^7.27.1",
- "js-tokens": "^4.0.0",
- "picocolors": "^1.1.1"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/generator": {
- "version": "7.28.5",
- "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz",
- "integrity": "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==",
- "license": "MIT",
- "dependencies": {
- "@babel/parser": "^7.28.5",
- "@babel/types": "^7.28.5",
- "@jridgewell/gen-mapping": "^0.3.12",
- "@jridgewell/trace-mapping": "^0.3.28",
- "jsesc": "^3.0.2"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-globals": {
- "version": "7.28.0",
- "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz",
- "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==",
- "license": "MIT",
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-module-imports": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz",
- "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==",
- "license": "MIT",
- "dependencies": {
- "@babel/traverse": "^7.27.1",
- "@babel/types": "^7.27.1"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-string-parser": {
- "version": "7.27.1",
- "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz",
- "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==",
- "license": "MIT",
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/helper-validator-identifier": {
- "version": "7.28.5",
- "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz",
- "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==",
- "license": "MIT",
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/parser": {
- "version": "7.28.5",
- "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz",
- "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==",
- "license": "MIT",
- "dependencies": {
- "@babel/types": "^7.28.5"
- },
- "bin": {
- "parser": "bin/babel-parser.js"
- },
- "engines": {
- "node": ">=6.0.0"
- }
- },
- "node_modules/@babel/runtime": {
- "version": "7.28.4",
- "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.4.tgz",
- "integrity": "sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==",
- "license": "MIT",
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/template": {
- "version": "7.27.2",
- "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz",
- "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==",
- "license": "MIT",
- "dependencies": {
- "@babel/code-frame": "^7.27.1",
- "@babel/parser": "^7.27.2",
- "@babel/types": "^7.27.1"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/traverse": {
- "version": "7.28.5",
- "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz",
- "integrity": "sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==",
- "license": "MIT",
- "dependencies": {
- "@babel/code-frame": "^7.27.1",
- "@babel/generator": "^7.28.5",
- "@babel/helper-globals": "^7.28.0",
- "@babel/parser": "^7.28.5",
- "@babel/template": "^7.27.2",
- "@babel/types": "^7.28.5",
- "debug": "^4.3.1"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@babel/types": {
- "version": "7.28.5",
- "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz",
- "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==",
- "license": "MIT",
- "dependencies": {
- "@babel/helper-string-parser": "^7.27.1",
- "@babel/helper-validator-identifier": "^7.28.5"
- },
- "engines": {
- "node": ">=6.9.0"
- }
- },
- "node_modules/@bcoe/v8-coverage": {
- "version": "0.2.3",
- "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz",
- "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/@csstools/color-helpers": {
- "version": "5.1.0",
- "resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-5.1.0.tgz",
- "integrity": "sha512-S11EXWJyy0Mz5SYvRmY8nJYTFFd1LCNV+7cXyAgQtOOuzb4EsgfqDufL+9esx72/eLhsRdGZwaldu/h+E4t4BA==",
- "dev": true,
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/csstools"
- },
- {
- "type": "opencollective",
- "url": "https://opencollective.com/csstools"
- }
- ],
- "license": "MIT-0",
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@csstools/css-calc": {
- "version": "2.1.4",
- "resolved": "https://registry.npmjs.org/@csstools/css-calc/-/css-calc-2.1.4.tgz",
- "integrity": "sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ==",
- "dev": true,
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/csstools"
- },
- {
- "type": "opencollective",
- "url": "https://opencollective.com/csstools"
- }
- ],
- "license": "MIT",
- "engines": {
- "node": ">=18"
- },
- "peerDependencies": {
- "@csstools/css-parser-algorithms": "^3.0.5",
- "@csstools/css-tokenizer": "^3.0.4"
- }
- },
- "node_modules/@csstools/css-color-parser": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/@csstools/css-color-parser/-/css-color-parser-3.1.0.tgz",
- "integrity": "sha512-nbtKwh3a6xNVIp/VRuXV64yTKnb1IjTAEEh3irzS+HkKjAOYLTGNb9pmVNntZ8iVBHcWDA2Dof0QtPgFI1BaTA==",
- "dev": true,
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/csstools"
- },
- {
- "type": "opencollective",
- "url": "https://opencollective.com/csstools"
- }
- ],
- "license": "MIT",
- "dependencies": {
- "@csstools/color-helpers": "^5.1.0",
- "@csstools/css-calc": "^2.1.4"
- },
- "engines": {
- "node": ">=18"
- },
- "peerDependencies": {
- "@csstools/css-parser-algorithms": "^3.0.5",
- "@csstools/css-tokenizer": "^3.0.4"
- }
- },
- "node_modules/@csstools/css-parser-algorithms": {
- "version": "3.0.5",
- "resolved": "https://registry.npmjs.org/@csstools/css-parser-algorithms/-/css-parser-algorithms-3.0.5.tgz",
- "integrity": "sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==",
- "dev": true,
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/csstools"
- },
- {
- "type": "opencollective",
- "url": "https://opencollective.com/csstools"
- }
- ],
- "license": "MIT",
- "engines": {
- "node": ">=18"
- },
- "peerDependencies": {
- "@csstools/css-tokenizer": "^3.0.4"
- }
- },
- "node_modules/@csstools/css-tokenizer": {
- "version": "3.0.4",
- "resolved": "https://registry.npmjs.org/@csstools/css-tokenizer/-/css-tokenizer-3.0.4.tgz",
- "integrity": "sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==",
- "dev": true,
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/csstools"
- },
- {
- "type": "opencollective",
- "url": "https://opencollective.com/csstools"
- }
- ],
- "license": "MIT",
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/@emotion/babel-plugin": {
- "version": "11.13.5",
- "resolved": "https://registry.npmjs.org/@emotion/babel-plugin/-/babel-plugin-11.13.5.tgz",
- "integrity": "sha512-pxHCpT2ex+0q+HH91/zsdHkw/lXd468DIN2zvfvLtPKLLMo6gQj7oLObq8PhkrxOZb/gGCq03S3Z7PDhS8pduQ==",
- "license": "MIT",
- "dependencies": {
- "@babel/helper-module-imports": "^7.16.7",
- "@babel/runtime": "^7.18.3",
- "@emotion/hash": "^0.9.2",
- "@emotion/memoize": "^0.9.0",
- "@emotion/serialize": "^1.3.3",
- "babel-plugin-macros": "^3.1.0",
- "convert-source-map": "^1.5.0",
- "escape-string-regexp": "^4.0.0",
- "find-root": "^1.1.0",
- "source-map": "^0.5.7",
- "stylis": "4.2.0"
- }
- },
- "node_modules/@emotion/babel-plugin/node_modules/@emotion/hash": {
- "version": "0.9.2",
- "resolved": "https://registry.npmjs.org/@emotion/hash/-/hash-0.9.2.tgz",
- "integrity": "sha512-MyqliTZGuOm3+5ZRSaaBGP3USLw6+EGykkwZns2EPC5g8jJ4z9OrdZY9apkl3+UP9+sdz76YYkwCKP5gh8iY3g==",
- "license": "MIT"
- },
- "node_modules/@emotion/babel-plugin/node_modules/stylis": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.2.0.tgz",
- "integrity": "sha512-Orov6g6BB1sDfYgzWfTHDOxamtX1bE/zo104Dh9e6fqJ3PooipYyfJ0pUmrZO2wAvO8YbEyeFrkV91XTsGMSrw==",
- "license": "MIT"
- },
- "node_modules/@emotion/cache": {
- "version": "11.14.0",
- "resolved": "https://registry.npmjs.org/@emotion/cache/-/cache-11.14.0.tgz",
- "integrity": "sha512-L/B1lc/TViYk4DcpGxtAVbx0ZyiKM5ktoIyafGkH6zg/tj+mA+NE//aPYKG0k8kCHSHVJrpLpcAlOBEXQ3SavA==",
- "license": "MIT",
- "dependencies": {
- "@emotion/memoize": "^0.9.0",
- "@emotion/sheet": "^1.4.0",
- "@emotion/utils": "^1.4.2",
- "@emotion/weak-memoize": "^0.4.0",
- "stylis": "4.2.0"
- }
- },
- "node_modules/@emotion/cache/node_modules/stylis": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.2.0.tgz",
- "integrity": "sha512-Orov6g6BB1sDfYgzWfTHDOxamtX1bE/zo104Dh9e6fqJ3PooipYyfJ0pUmrZO2wAvO8YbEyeFrkV91XTsGMSrw==",
- "license": "MIT"
- },
- "node_modules/@emotion/css": {
- "version": "11.13.5",
- "resolved": "https://registry.npmjs.org/@emotion/css/-/css-11.13.5.tgz",
- "integrity": "sha512-wQdD0Xhkn3Qy2VNcIzbLP9MR8TafI0MJb7BEAXKp+w4+XqErksWR4OXomuDzPsN4InLdGhVe6EYcn2ZIUCpB8w==",
- "license": "MIT",
- "dependencies": {
- "@emotion/babel-plugin": "^11.13.5",
- "@emotion/cache": "^11.13.5",
- "@emotion/serialize": "^1.3.3",
- "@emotion/sheet": "^1.4.0",
- "@emotion/utils": "^1.4.2"
- }
- },
- "node_modules/@emotion/hash": {
- "version": "0.8.0",
- "resolved": "https://registry.npmjs.org/@emotion/hash/-/hash-0.8.0.tgz",
- "integrity": "sha512-kBJtf7PH6aWwZ6fka3zQ0p6SBYzx4fl1LoZXE2RrnYST9Xljm7WfKJrU4g/Xr3Beg72MLrp1AWNUmuYJTL7Cow==",
- "license": "MIT"
- },
- "node_modules/@emotion/memoize": {
- "version": "0.9.0",
- "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.9.0.tgz",
- "integrity": "sha512-30FAj7/EoJ5mwVPOWhAyCX+FPfMDrVecJAM+Iw9NRoSl4BBAQeqj4cApHHUXOVvIPgLVDsCFoz/hGD+5QQD1GQ==",
- "license": "MIT"
- },
- "node_modules/@emotion/react": {
- "version": "11.14.0",
- "resolved": "https://registry.npmjs.org/@emotion/react/-/react-11.14.0.tgz",
- "integrity": "sha512-O000MLDBDdk/EohJPFUqvnp4qnHeYkVP5B0xEG0D/L7cOKP9kefu2DXn8dj74cQfsEzUqh+sr1RzFqiL1o+PpA==",
- "license": "MIT",
- "dependencies": {
- "@babel/runtime": "^7.18.3",
- "@emotion/babel-plugin": "^11.13.5",
- "@emotion/cache": "^11.14.0",
- "@emotion/serialize": "^1.3.3",
- "@emotion/use-insertion-effect-with-fallbacks": "^1.2.0",
- "@emotion/utils": "^1.4.2",
- "@emotion/weak-memoize": "^0.4.0",
- "hoist-non-react-statics": "^3.3.1"
- },
- "peerDependencies": {
- "react": ">=16.8.0"
- },
- "peerDependenciesMeta": {
- "@types/react": {
- "optional": true
- }
- }
- },
- "node_modules/@emotion/serialize": {
- "version": "1.3.3",
- "resolved": "https://registry.npmjs.org/@emotion/serialize/-/serialize-1.3.3.tgz",
- "integrity": "sha512-EISGqt7sSNWHGI76hC7x1CksiXPahbxEOrC5RjmFRJTqLyEK9/9hZvBbiYn70dw4wuwMKiEMCUlR6ZXTSWQqxA==",
- "license": "MIT",
- "dependencies": {
- "@emotion/hash": "^0.9.2",
- "@emotion/memoize": "^0.9.0",
- "@emotion/unitless": "^0.10.0",
- "@emotion/utils": "^1.4.2",
- "csstype": "^3.0.2"
- }
- },
- "node_modules/@emotion/serialize/node_modules/@emotion/hash": {
- "version": "0.9.2",
- "resolved": "https://registry.npmjs.org/@emotion/hash/-/hash-0.9.2.tgz",
- "integrity": "sha512-MyqliTZGuOm3+5ZRSaaBGP3USLw6+EGykkwZns2EPC5g8jJ4z9OrdZY9apkl3+UP9+sdz76YYkwCKP5gh8iY3g==",
- "license": "MIT"
- },
- "node_modules/@emotion/serialize/node_modules/@emotion/unitless": {
- "version": "0.10.0",
- "resolved": "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.10.0.tgz",
- "integrity": "sha512-dFoMUuQA20zvtVTuxZww6OHoJYgrzfKM1t52mVySDJnMSEa08ruEvdYQbhvyu6soU+NeLVd3yKfTfT0NeV6qGg==",
- "license": "MIT"
- },
- "node_modules/@emotion/sheet": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/@emotion/sheet/-/sheet-1.4.0.tgz",
- "integrity": "sha512-fTBW9/8r2w3dXWYM4HCB1Rdp8NLibOw2+XELH5m5+AkWiL/KqYX6dc0kKYlaYyKjrQ6ds33MCdMPEwgs2z1rqg==",
- "license": "MIT"
- },
- "node_modules/@emotion/unitless": {
- "version": "0.7.5",
- "resolved": "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.7.5.tgz",
- "integrity": "sha512-OWORNpfjMsSSUBVrRBVGECkhWcULOAJz9ZW8uK9qgxD+87M7jHRcvh/A96XXNhXTLmKcoYSQtBEX7lHMO7YRwg==",
- "license": "MIT"
- },
- "node_modules/@emotion/use-insertion-effect-with-fallbacks": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/@emotion/use-insertion-effect-with-fallbacks/-/use-insertion-effect-with-fallbacks-1.2.0.tgz",
- "integrity": "sha512-yJMtVdH59sxi/aVJBpk9FQq+OR8ll5GT8oWd57UpeaKEVGab41JWaCFA7FRLoMLloOZF/c/wsPoe+bfGmRKgDg==",
- "license": "MIT",
- "peerDependencies": {
- "react": ">=16.8.0"
- }
- },
- "node_modules/@emotion/utils": {
- "version": "1.4.2",
- "resolved": "https://registry.npmjs.org/@emotion/utils/-/utils-1.4.2.tgz",
- "integrity": "sha512-3vLclRofFziIa3J2wDh9jjbkUz9qk5Vi3IZ/FSTKViB0k+ef0fPV7dYrUIugbgupYDx7v9ud/SjrtEP8Y4xLoA==",
- "license": "MIT"
- },
- "node_modules/@emotion/weak-memoize": {
- "version": "0.4.0",
- "resolved": "https://registry.npmjs.org/@emotion/weak-memoize/-/weak-memoize-0.4.0.tgz",
- "integrity": "sha512-snKqtPW01tN0ui7yu9rGv69aJXr/a/Ywvl11sUjNtEcRc+ng/mQriFL0wLXMef74iHa/EkftbDzU9F8iFbH+zg==",
- "license": "MIT"
- },
- "node_modules/@esbuild/aix-ppc64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz",
- "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==",
- "cpu": [
- "ppc64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "aix"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/android-arm": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz",
- "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==",
- "cpu": [
- "arm"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "android"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/android-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz",
- "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "android"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/android-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz",
- "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "android"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/darwin-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz",
- "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "darwin"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/darwin-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz",
- "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "darwin"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/freebsd-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz",
- "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "freebsd"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/freebsd-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz",
- "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "freebsd"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/linux-arm": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz",
- "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==",
- "cpu": [
- "arm"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/linux-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz",
- "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/linux-ia32": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz",
- "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==",
- "cpu": [
- "ia32"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/linux-loong64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz",
- "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==",
- "cpu": [
- "loong64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/linux-mips64el": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz",
- "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==",
- "cpu": [
- "mips64el"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/linux-ppc64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz",
- "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==",
- "cpu": [
- "ppc64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/linux-riscv64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz",
- "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==",
- "cpu": [
- "riscv64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/linux-s390x": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz",
- "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==",
- "cpu": [
- "s390x"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/linux-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz",
- "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/netbsd-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz",
- "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "netbsd"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/openbsd-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz",
- "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "openbsd"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/sunos-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz",
- "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "sunos"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/win32-arm64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz",
- "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "win32"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/win32-ia32": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz",
- "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==",
- "cpu": [
- "ia32"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "win32"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@esbuild/win32-x64": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz",
- "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "win32"
- ],
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@eslint-community/eslint-utils": {
- "version": "4.9.1",
- "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.1.tgz",
- "integrity": "sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "eslint-visitor-keys": "^3.4.3"
- },
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- },
- "peerDependencies": {
- "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0"
- }
- },
- "node_modules/@eslint-community/regexpp": {
- "version": "4.12.2",
- "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz",
- "integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": "^12.0.0 || ^14.0.0 || >=16.0.0"
- }
- },
- "node_modules/@eslint/eslintrc": {
- "version": "2.1.4",
- "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz",
- "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "ajv": "^6.12.4",
- "debug": "^4.3.2",
- "espree": "^9.6.0",
- "globals": "^13.19.0",
- "ignore": "^5.2.0",
- "import-fresh": "^3.2.1",
- "js-yaml": "^4.1.0",
- "minimatch": "^3.1.2",
- "strip-json-comments": "^3.1.1"
- },
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
- "node_modules/@eslint/eslintrc/node_modules/brace-expansion": {
- "version": "1.1.12",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
- "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
- }
- },
- "node_modules/@eslint/eslintrc/node_modules/minimatch": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
- "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "brace-expansion": "^1.1.7"
- },
- "engines": {
- "node": "*"
- }
- },
- "node_modules/@eslint/js": {
- "version": "8.57.1",
- "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz",
- "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- }
- },
- "node_modules/@humanwhocodes/config-array": {
- "version": "0.13.0",
- "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz",
- "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==",
- "deprecated": "Use @eslint/config-array instead",
- "dev": true,
- "license": "Apache-2.0",
- "dependencies": {
- "@humanwhocodes/object-schema": "^2.0.3",
- "debug": "^4.3.1",
- "minimatch": "^3.0.5"
- },
- "engines": {
- "node": ">=10.10.0"
- }
- },
- "node_modules/@humanwhocodes/config-array/node_modules/brace-expansion": {
- "version": "1.1.12",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
- "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
- }
- },
- "node_modules/@humanwhocodes/config-array/node_modules/minimatch": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
- "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "brace-expansion": "^1.1.7"
- },
- "engines": {
- "node": "*"
- }
- },
- "node_modules/@humanwhocodes/module-importer": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
- "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==",
- "dev": true,
- "license": "Apache-2.0",
- "engines": {
- "node": ">=12.22"
- },
- "funding": {
- "type": "github",
- "url": "https://github.com/sponsors/nzakas"
- }
- },
- "node_modules/@humanwhocodes/object-schema": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz",
- "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==",
- "deprecated": "Use @eslint/object-schema instead",
- "dev": true,
- "license": "BSD-3-Clause"
- },
- "node_modules/@intlify/core-base": {
- "version": "9.14.5",
- "resolved": "https://registry.npmjs.org/@intlify/core-base/-/core-base-9.14.5.tgz",
- "integrity": "sha512-5ah5FqZG4pOoHjkvs8mjtv+gPKYU0zCISaYNjBNNqYiaITxW8ZtVih3GS/oTOqN8d9/mDLyrjD46GBApNxmlsA==",
- "license": "MIT",
- "dependencies": {
- "@intlify/message-compiler": "9.14.5",
- "@intlify/shared": "9.14.5"
- },
- "engines": {
- "node": ">= 16"
- },
- "funding": {
- "url": "https://github.com/sponsors/kazupon"
- }
- },
- "node_modules/@intlify/message-compiler": {
- "version": "9.14.5",
- "resolved": "https://registry.npmjs.org/@intlify/message-compiler/-/message-compiler-9.14.5.tgz",
- "integrity": "sha512-IHzgEu61/YIpQV5Pc3aRWScDcnFKWvQA9kigcINcCBXN8mbW+vk9SK+lDxA6STzKQsVJxUPg9ACC52pKKo3SVQ==",
- "license": "MIT",
- "dependencies": {
- "@intlify/shared": "9.14.5",
- "source-map-js": "^1.0.2"
- },
- "engines": {
- "node": ">= 16"
- },
- "funding": {
- "url": "https://github.com/sponsors/kazupon"
- }
- },
- "node_modules/@intlify/shared": {
- "version": "9.14.5",
- "resolved": "https://registry.npmjs.org/@intlify/shared/-/shared-9.14.5.tgz",
- "integrity": "sha512-9gB+E53BYuAEMhbCAxVgG38EZrk59sxBtv3jSizNL2hEWlgjBjAw1AwpLHtNaeda12pe6W20OGEa0TwuMSRbyQ==",
- "license": "MIT",
- "engines": {
- "node": ">= 16"
- },
- "funding": {
- "url": "https://github.com/sponsors/kazupon"
- }
- },
- "node_modules/@isaacs/cliui": {
- "version": "8.0.2",
- "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
- "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "string-width": "^5.1.2",
- "string-width-cjs": "npm:string-width@^4.2.0",
- "strip-ansi": "^7.0.1",
- "strip-ansi-cjs": "npm:strip-ansi@^6.0.1",
- "wrap-ansi": "^8.1.0",
- "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0"
- },
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/@isaacs/cliui/node_modules/ansi-regex": {
- "version": "6.2.2",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
- "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/chalk/ansi-regex?sponsor=1"
- }
- },
- "node_modules/@isaacs/cliui/node_modules/strip-ansi": {
- "version": "7.1.2",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
- "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "ansi-regex": "^6.0.1"
- },
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/chalk/strip-ansi?sponsor=1"
- }
- },
- "node_modules/@istanbuljs/schema": {
- "version": "0.1.3",
- "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz",
- "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/@jridgewell/gen-mapping": {
- "version": "0.3.13",
- "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz",
- "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==",
- "license": "MIT",
- "dependencies": {
- "@jridgewell/sourcemap-codec": "^1.5.0",
- "@jridgewell/trace-mapping": "^0.3.24"
- }
- },
- "node_modules/@jridgewell/resolve-uri": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
- "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
- "license": "MIT",
- "engines": {
- "node": ">=6.0.0"
- }
- },
- "node_modules/@jridgewell/sourcemap-codec": {
- "version": "1.5.5",
- "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz",
- "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==",
- "license": "MIT"
- },
- "node_modules/@jridgewell/trace-mapping": {
- "version": "0.3.31",
- "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz",
- "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==",
- "license": "MIT",
- "dependencies": {
- "@jridgewell/resolve-uri": "^3.1.0",
- "@jridgewell/sourcemap-codec": "^1.4.14"
- }
- },
- "node_modules/@kurkle/color": {
- "version": "0.3.4",
- "resolved": "https://registry.npmjs.org/@kurkle/color/-/color-0.3.4.tgz",
- "integrity": "sha512-M5UknZPHRu3DEDWoipU6sE8PdkZ6Z/S+v4dD+Ke8IaNlpdSQah50lz1KtcFBa2vsdOnwbbnxJwVM4wty6udA5w==",
- "license": "MIT"
- },
- "node_modules/@lobehub/icons": {
- "version": "4.0.2",
- "resolved": "https://registry.npmjs.org/@lobehub/icons/-/icons-4.0.2.tgz",
- "integrity": "sha512-mYFEXXt7Z8iY8yLP5cDVctUPqlZUHWi5qzQCJiC646p7uiXhtpn93sRab/5pey+CYDh6BbRU6lhwiURu/SU5IA==",
- "license": "MIT",
- "workspaces": [
- "packages/*"
- ],
- "dependencies": {
- "antd-style": "^4.1.0",
- "lucide-react": "^0.469.0",
- "polished": "^4.3.1"
- },
- "peerDependencies": {
- "@lobehub/ui": "^4.3.3",
- "antd": "^6.1.1",
- "react": "^19.0.0",
- "react-dom": "^19.0.0"
- }
- },
- "node_modules/@nodelib/fs.scandir": {
- "version": "2.1.5",
- "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
- "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@nodelib/fs.stat": "2.0.5",
- "run-parallel": "^1.1.9"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/@nodelib/fs.stat": {
- "version": "2.0.5",
- "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz",
- "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/@nodelib/fs.walk": {
- "version": "1.2.8",
- "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz",
- "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@nodelib/fs.scandir": "2.1.5",
- "fastq": "^1.6.0"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/@one-ini/wasm": {
- "version": "0.1.1",
- "resolved": "https://registry.npmjs.org/@one-ini/wasm/-/wasm-0.1.1.tgz",
- "integrity": "sha512-XuySG1E38YScSJoMlqovLru4KTUNSjgVTIjyh7qMX6aNN5HY5Ct5LhRJdxO79JtTzKfzV/bnWpz+zquYrISsvw==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/@pkgjs/parseargs": {
- "version": "0.11.0",
- "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
- "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
- "dev": true,
- "license": "MIT",
- "optional": true,
- "engines": {
- "node": ">=14"
- }
- },
- "node_modules/@rc-component/util": {
- "version": "1.7.0",
- "resolved": "https://registry.npmjs.org/@rc-component/util/-/util-1.7.0.tgz",
- "integrity": "sha512-tIvIGj4Vl6fsZFvWSkYw9sAfiCKUXMyhVz6kpKyZbwyZyRPqv2vxYZROdaO1VB4gqTNvUZFXh6i3APUiterw5g==",
- "license": "MIT",
- "dependencies": {
- "is-mobile": "^5.0.0",
- "react-is": "^18.2.0"
- },
- "peerDependencies": {
- "react": ">=18.0.0",
- "react-dom": ">=18.0.0"
- }
- },
- "node_modules/@rollup/rollup-android-arm-eabi": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.55.1.tgz",
- "integrity": "sha512-9R0DM/ykwfGIlNu6+2U09ga0WXeZ9MRC2Ter8jnz8415VbuIykVuc6bhdrbORFZANDmTDvq26mJrEVTl8TdnDg==",
- "cpu": [
- "arm"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "android"
- ]
- },
- "node_modules/@rollup/rollup-android-arm64": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.55.1.tgz",
- "integrity": "sha512-eFZCb1YUqhTysgW3sj/55du5cG57S7UTNtdMjCW7LwVcj3dTTcowCsC8p7uBdzKsZYa8J7IDE8lhMI+HX1vQvg==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "android"
- ]
- },
- "node_modules/@rollup/rollup-darwin-arm64": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.55.1.tgz",
- "integrity": "sha512-p3grE2PHcQm2e8PSGZdzIhCKbMCw/xi9XvMPErPhwO17vxtvCN5FEA2mSLgmKlCjHGMQTP6phuQTYWUnKewwGg==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "darwin"
- ]
- },
- "node_modules/@rollup/rollup-darwin-x64": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.55.1.tgz",
- "integrity": "sha512-rDUjG25C9qoTm+e02Esi+aqTKSBYwVTaoS1wxcN47/Luqef57Vgp96xNANwt5npq9GDxsH7kXxNkJVEsWEOEaQ==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "darwin"
- ]
- },
- "node_modules/@rollup/rollup-freebsd-arm64": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.55.1.tgz",
- "integrity": "sha512-+JiU7Jbp5cdxekIgdte0jfcu5oqw4GCKr6i3PJTlXTCU5H5Fvtkpbs4XJHRmWNXF+hKmn4v7ogI5OQPaupJgOg==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "freebsd"
- ]
- },
- "node_modules/@rollup/rollup-freebsd-x64": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.55.1.tgz",
- "integrity": "sha512-V5xC1tOVWtLLmr3YUk2f6EJK4qksksOYiz/TCsFHu/R+woubcLWdC9nZQmwjOAbmExBIVKsm1/wKmEy4z4u4Bw==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "freebsd"
- ]
- },
- "node_modules/@rollup/rollup-linux-arm-gnueabihf": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.55.1.tgz",
- "integrity": "sha512-Rn3n+FUk2J5VWx+ywrG/HGPTD9jXNbicRtTM11e/uorplArnXZYsVifnPPqNNP5BsO3roI4n8332ukpY/zN7rQ==",
- "cpu": [
- "arm"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ]
- },
- "node_modules/@rollup/rollup-linux-arm-musleabihf": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.55.1.tgz",
- "integrity": "sha512-grPNWydeKtc1aEdrJDWk4opD7nFtQbMmV7769hiAaYyUKCT1faPRm2av8CX1YJsZ4TLAZcg9gTR1KvEzoLjXkg==",
- "cpu": [
- "arm"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ]
- },
- "node_modules/@rollup/rollup-linux-arm64-gnu": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.55.1.tgz",
- "integrity": "sha512-a59mwd1k6x8tXKcUxSyISiquLwB5pX+fJW9TkWU46lCqD/GRDe9uDN31jrMmVP3feI3mhAdvcCClhV8V5MhJFQ==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ]
- },
- "node_modules/@rollup/rollup-linux-arm64-musl": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.55.1.tgz",
- "integrity": "sha512-puS1MEgWX5GsHSoiAsF0TYrpomdvkaXm0CofIMG5uVkP6IBV+ZO9xhC5YEN49nsgYo1DuuMquF9+7EDBVYu4uA==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ]
- },
- "node_modules/@rollup/rollup-linux-loong64-gnu": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.55.1.tgz",
- "integrity": "sha512-r3Wv40in+lTsULSb6nnoudVbARdOwb2u5fpeoOAZjFLznp6tDU8kd+GTHmJoqZ9lt6/Sys33KdIHUaQihFcu7g==",
- "cpu": [
- "loong64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ]
- },
- "node_modules/@rollup/rollup-linux-loong64-musl": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.55.1.tgz",
- "integrity": "sha512-MR8c0+UxAlB22Fq4R+aQSPBayvYa3+9DrwG/i1TKQXFYEaoW3B5b/rkSRIypcZDdWjWnpcvxbNaAJDcSbJU3Lw==",
- "cpu": [
- "loong64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ]
- },
- "node_modules/@rollup/rollup-linux-ppc64-gnu": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.55.1.tgz",
- "integrity": "sha512-3KhoECe1BRlSYpMTeVrD4sh2Pw2xgt4jzNSZIIPLFEsnQn9gAnZagW9+VqDqAHgm1Xc77LzJOo2LdigS5qZ+gw==",
- "cpu": [
- "ppc64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ]
- },
- "node_modules/@rollup/rollup-linux-ppc64-musl": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.55.1.tgz",
- "integrity": "sha512-ziR1OuZx0vdYZZ30vueNZTg73alF59DicYrPViG0NEgDVN8/Jl87zkAPu4u6VjZST2llgEUjaiNl9JM6HH1Vdw==",
- "cpu": [
- "ppc64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ]
- },
- "node_modules/@rollup/rollup-linux-riscv64-gnu": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.55.1.tgz",
- "integrity": "sha512-uW0Y12ih2XJRERZ4jAfKamTyIHVMPQnTZcQjme2HMVDAHY4amf5u414OqNYC+x+LzRdRcnIG1YodLrrtA8xsxw==",
- "cpu": [
- "riscv64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ]
- },
- "node_modules/@rollup/rollup-linux-riscv64-musl": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.55.1.tgz",
- "integrity": "sha512-u9yZ0jUkOED1BFrqu3BwMQoixvGHGZ+JhJNkNKY/hyoEgOwlqKb62qu+7UjbPSHYjiVy8kKJHvXKv5coH4wDeg==",
- "cpu": [
- "riscv64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ]
- },
- "node_modules/@rollup/rollup-linux-s390x-gnu": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.55.1.tgz",
- "integrity": "sha512-/0PenBCmqM4ZUd0190j7J0UsQ/1nsi735iPRakO8iPciE7BQ495Y6msPzaOmvx0/pn+eJVVlZrNrSh4WSYLxNg==",
- "cpu": [
- "s390x"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ]
- },
- "node_modules/@rollup/rollup-linux-x64-gnu": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.55.1.tgz",
- "integrity": "sha512-a8G4wiQxQG2BAvo+gU6XrReRRqj+pLS2NGXKm8io19goR+K8lw269eTrPkSdDTALwMmJp4th2Uh0D8J9bEV1vg==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ]
- },
- "node_modules/@rollup/rollup-linux-x64-musl": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.55.1.tgz",
- "integrity": "sha512-bD+zjpFrMpP/hqkfEcnjXWHMw5BIghGisOKPj+2NaNDuVT+8Ds4mPf3XcPHuat1tz89WRL+1wbcxKY3WSbiT7w==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "linux"
- ]
- },
- "node_modules/@rollup/rollup-openbsd-x64": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.55.1.tgz",
- "integrity": "sha512-eLXw0dOiqE4QmvikfQ6yjgkg/xDM+MdU9YJuP4ySTibXU0oAvnEWXt7UDJmD4UkYialMfOGFPJnIHSe/kdzPxg==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "openbsd"
- ]
- },
- "node_modules/@rollup/rollup-openharmony-arm64": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.55.1.tgz",
- "integrity": "sha512-xzm44KgEP11te3S2HCSyYf5zIzWmx3n8HDCc7EE59+lTcswEWNpvMLfd9uJvVX8LCg9QWG67Xt75AuHn4vgsXw==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "openharmony"
- ]
- },
- "node_modules/@rollup/rollup-win32-arm64-msvc": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.55.1.tgz",
- "integrity": "sha512-yR6Bl3tMC/gBok5cz/Qi0xYnVbIxGx5Fcf/ca0eB6/6JwOY+SRUcJfI0OpeTpPls7f194as62thCt/2BjxYN8g==",
- "cpu": [
- "arm64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "win32"
- ]
- },
- "node_modules/@rollup/rollup-win32-ia32-msvc": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.55.1.tgz",
- "integrity": "sha512-3fZBidchE0eY0oFZBnekYCfg+5wAB0mbpCBuofh5mZuzIU/4jIVkbESmd2dOsFNS78b53CYv3OAtwqkZZmU5nA==",
- "cpu": [
- "ia32"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "win32"
- ]
- },
- "node_modules/@rollup/rollup-win32-x64-gnu": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.55.1.tgz",
- "integrity": "sha512-xGGY5pXj69IxKb4yv/POoocPy/qmEGhimy/FoTpTSVju3FYXUQQMFCaZZXJVidsmGxRioZAwpThl/4zX41gRKg==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "win32"
- ]
- },
- "node_modules/@rollup/rollup-win32-x64-msvc": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.55.1.tgz",
- "integrity": "sha512-SPEpaL6DX4rmcXtnhdrQYgzQ5W2uW3SCJch88lB2zImhJRhIIK44fkUrgIV/Q8yUNfw5oyZ5vkeQsZLhCb06lw==",
- "cpu": [
- "x64"
- ],
- "dev": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "win32"
- ]
- },
- "node_modules/@types/estree": {
- "version": "1.0.8",
- "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz",
- "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/@types/file-saver": {
- "version": "2.0.7",
- "resolved": "https://registry.npmjs.org/@types/file-saver/-/file-saver-2.0.7.tgz",
- "integrity": "sha512-dNKVfHd/jk0SkR/exKGj2ggkB45MAkzvWCaqLUUgkyjITkGNzH8H+yUwr+BLJUBjZOe9w8X3wgmXhZDRg1ED6A==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/@types/mdx": {
- "version": "2.0.13",
- "resolved": "https://registry.npmjs.org/@types/mdx/-/mdx-2.0.13.tgz",
- "integrity": "sha512-+OWZQfAYyio6YkJb3HLxDrvnx6SWWDbC0zVPfBRzUk0/nqoDyf6dNxQi3eArPe8rJ473nobTMQ/8Zk+LxJ+Yuw==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/@types/node": {
- "version": "20.19.27",
- "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.27.tgz",
- "integrity": "sha512-N2clP5pJhB2YnZJ3PIHFk5RkygRX5WO/5f0WC08tp0wd+sv0rsJk3MqWn3CbNmT2J505a5336jaQj4ph1AdMug==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "undici-types": "~6.21.0"
- }
- },
- "node_modules/@types/parse-json": {
- "version": "4.0.2",
- "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.2.tgz",
- "integrity": "sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==",
- "license": "MIT"
- },
- "node_modules/@types/qrcode": {
- "version": "1.5.6",
- "resolved": "https://registry.npmmirror.com/@types/qrcode/-/qrcode-1.5.6.tgz",
- "integrity": "sha512-te7NQcV2BOvdj2b1hCAHzAoMNuj65kNBMz0KBaxM6c3VGBOhU0dURQKOtH8CFNI/dsKkwlv32p26qYQTWoB5bw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@types/node": "*"
- }
- },
- "node_modules/@types/web-bluetooth": {
- "version": "0.0.20",
- "resolved": "https://registry.npmjs.org/@types/web-bluetooth/-/web-bluetooth-0.0.20.tgz",
- "integrity": "sha512-g9gZnnXVq7gM7v3tJCWV/qw7w+KeOlSHAhgF9RytFyifW6AF61hdT2ucrYhPq9hLs5JIryeupHV3qGk95dH9ow==",
- "license": "MIT"
- },
- "node_modules/@typescript-eslint/eslint-plugin": {
- "version": "7.18.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.18.0.tgz",
- "integrity": "sha512-94EQTWZ40mzBc42ATNIBimBEDltSJ9RQHCC8vc/PDbxi4k8dVwUAv4o98dk50M1zB+JGFxp43FP7f8+FP8R6Sw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@eslint-community/regexpp": "^4.10.0",
- "@typescript-eslint/scope-manager": "7.18.0",
- "@typescript-eslint/type-utils": "7.18.0",
- "@typescript-eslint/utils": "7.18.0",
- "@typescript-eslint/visitor-keys": "7.18.0",
- "graphemer": "^1.4.0",
- "ignore": "^5.3.1",
- "natural-compare": "^1.4.0",
- "ts-api-utils": "^1.3.0"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "@typescript-eslint/parser": "^7.0.0",
- "eslint": "^8.56.0"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/parser": {
- "version": "7.18.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.18.0.tgz",
- "integrity": "sha512-4Z+L8I2OqhZV8qA132M4wNL30ypZGYOQVBfMgxDH/K5UX0PNqTu1c6za9ST5r9+tavvHiTWmBnKzpCJ/GlVFtg==",
- "dev": true,
- "license": "BSD-2-Clause",
- "dependencies": {
- "@typescript-eslint/scope-manager": "7.18.0",
- "@typescript-eslint/types": "7.18.0",
- "@typescript-eslint/typescript-estree": "7.18.0",
- "@typescript-eslint/visitor-keys": "7.18.0",
- "debug": "^4.3.4"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "eslint": "^8.56.0"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/scope-manager": {
- "version": "7.18.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.18.0.tgz",
- "integrity": "sha512-jjhdIE/FPF2B7Z1uzc6i3oWKbGcHb87Qw7AWj6jmEqNOfDFbJWtjt/XfwCpvNkpGWlcJaog5vTR+VV8+w9JflA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@typescript-eslint/types": "7.18.0",
- "@typescript-eslint/visitor-keys": "7.18.0"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/type-utils": {
- "version": "7.18.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.18.0.tgz",
- "integrity": "sha512-XL0FJXuCLaDuX2sYqZUUSOJ2sG5/i1AAze+axqmLnSkNEVMVYLF+cbwlB2w8D1tinFuSikHmFta+P+HOofrLeA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@typescript-eslint/typescript-estree": "7.18.0",
- "@typescript-eslint/utils": "7.18.0",
- "debug": "^4.3.4",
- "ts-api-utils": "^1.3.0"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "eslint": "^8.56.0"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/types": {
- "version": "7.18.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.18.0.tgz",
- "integrity": "sha512-iZqi+Ds1y4EDYUtlOOC+aUmxnE9xS/yCigkjA7XpTKV6nCBd3Hp/PRGGmdwnfkV2ThMyYldP1wRpm/id99spTQ==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@typescript-eslint/typescript-estree": {
- "version": "7.18.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.18.0.tgz",
- "integrity": "sha512-aP1v/BSPnnyhMHts8cf1qQ6Q1IFwwRvAQGRvBFkWlo3/lH29OXA3Pts+c10nxRxIBrDnoMqzhgdwVe5f2D6OzA==",
- "dev": true,
- "license": "BSD-2-Clause",
- "dependencies": {
- "@typescript-eslint/types": "7.18.0",
- "@typescript-eslint/visitor-keys": "7.18.0",
- "debug": "^4.3.4",
- "globby": "^11.1.0",
- "is-glob": "^4.0.3",
- "minimatch": "^9.0.4",
- "semver": "^7.6.0",
- "ts-api-utils": "^1.3.0"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@typescript-eslint/utils": {
- "version": "7.18.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.18.0.tgz",
- "integrity": "sha512-kK0/rNa2j74XuHVcoCZxdFBMF+aq/vH83CXAOHieC+2Gis4mF8jJXT5eAfyD3K0sAxtPuwxaIOIOvhwzVDt/kw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@eslint-community/eslint-utils": "^4.4.0",
- "@typescript-eslint/scope-manager": "7.18.0",
- "@typescript-eslint/types": "7.18.0",
- "@typescript-eslint/typescript-estree": "7.18.0"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- },
- "peerDependencies": {
- "eslint": "^8.56.0"
- }
- },
- "node_modules/@typescript-eslint/visitor-keys": {
- "version": "7.18.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.18.0.tgz",
- "integrity": "sha512-cDF0/Gf81QpY3xYyJKDV14Zwdmid5+uuENhjH2EqFaF0ni+yAyq/LzMaIJdhNJXZI7uLzwIlA+V7oWoyn6Curg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@typescript-eslint/types": "7.18.0",
- "eslint-visitor-keys": "^3.4.3"
- },
- "engines": {
- "node": "^18.18.0 || >=20.0.0"
- },
- "funding": {
- "type": "opencollective",
- "url": "https://opencollective.com/typescript-eslint"
- }
- },
- "node_modules/@ungap/structured-clone": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz",
- "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==",
- "dev": true,
- "license": "ISC"
- },
- "node_modules/@vitejs/plugin-vue": {
- "version": "5.2.4",
- "resolved": "https://registry.npmjs.org/@vitejs/plugin-vue/-/plugin-vue-5.2.4.tgz",
- "integrity": "sha512-7Yx/SXSOcQq5HiiV3orevHUFn+pmMB4cgbEkDYgnkUWb0WfeQ/wa2yFv6D5ICiCQOVpjA7vYDXrC7AGO8yjDHA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": "^18.0.0 || >=20.0.0"
- },
- "peerDependencies": {
- "vite": "^5.0.0 || ^6.0.0",
- "vue": "^3.2.25"
- }
- },
- "node_modules/@vitest/coverage-v8": {
- "version": "2.1.9",
- "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-2.1.9.tgz",
- "integrity": "sha512-Z2cOr0ksM00MpEfyVE8KXIYPEcBFxdbLSs56L8PO0QQMxt/6bDj45uQfxoc96v05KW3clk7vvgP0qfDit9DmfQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@ampproject/remapping": "^2.3.0",
- "@bcoe/v8-coverage": "^0.2.3",
- "debug": "^4.3.7",
- "istanbul-lib-coverage": "^3.2.2",
- "istanbul-lib-report": "^3.0.1",
- "istanbul-lib-source-maps": "^5.0.6",
- "istanbul-reports": "^3.1.7",
- "magic-string": "^0.30.12",
- "magicast": "^0.3.5",
- "std-env": "^3.8.0",
- "test-exclude": "^7.0.1",
- "tinyrainbow": "^1.2.0"
- },
- "funding": {
- "url": "https://opencollective.com/vitest"
- },
- "peerDependencies": {
- "@vitest/browser": "2.1.9",
- "vitest": "2.1.9"
- },
- "peerDependenciesMeta": {
- "@vitest/browser": {
- "optional": true
- }
- }
- },
- "node_modules/@vitest/expect": {
- "version": "2.1.9",
- "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-2.1.9.tgz",
- "integrity": "sha512-UJCIkTBenHeKT1TTlKMJWy1laZewsRIzYighyYiJKZreqtdxSos/S1t+ktRMQWu2CKqaarrkeszJx1cgC5tGZw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@vitest/spy": "2.1.9",
- "@vitest/utils": "2.1.9",
- "chai": "^5.1.2",
- "tinyrainbow": "^1.2.0"
- },
- "funding": {
- "url": "https://opencollective.com/vitest"
- }
- },
- "node_modules/@vitest/mocker": {
- "version": "2.1.9",
- "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-2.1.9.tgz",
- "integrity": "sha512-tVL6uJgoUdi6icpxmdrn5YNo3g3Dxv+IHJBr0GXHaEdTcw3F+cPKnsXFhli6nO+f/6SDKPHEK1UN+k+TQv0Ehg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@vitest/spy": "2.1.9",
- "estree-walker": "^3.0.3",
- "magic-string": "^0.30.12"
- },
- "funding": {
- "url": "https://opencollective.com/vitest"
- },
- "peerDependencies": {
- "msw": "^2.4.9",
- "vite": "^5.0.0"
- },
- "peerDependenciesMeta": {
- "msw": {
- "optional": true
- },
- "vite": {
- "optional": true
- }
- }
- },
- "node_modules/@vitest/mocker/node_modules/estree-walker": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz",
- "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@types/estree": "^1.0.0"
- }
- },
- "node_modules/@vitest/pretty-format": {
- "version": "2.1.9",
- "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-2.1.9.tgz",
- "integrity": "sha512-KhRIdGV2U9HOUzxfiHmY8IFHTdqtOhIzCpd8WRdJiE7D/HUcZVD0EgQCVjm+Q9gkUXWgBvMmTtZgIG48wq7sOQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "tinyrainbow": "^1.2.0"
- },
- "funding": {
- "url": "https://opencollective.com/vitest"
- }
- },
- "node_modules/@vitest/runner": {
- "version": "2.1.9",
- "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-2.1.9.tgz",
- "integrity": "sha512-ZXSSqTFIrzduD63btIfEyOmNcBmQvgOVsPNPe0jYtESiXkhd8u2erDLnMxmGrDCwHCCHE7hxwRDCT3pt0esT4g==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@vitest/utils": "2.1.9",
- "pathe": "^1.1.2"
- },
- "funding": {
- "url": "https://opencollective.com/vitest"
- }
- },
- "node_modules/@vitest/snapshot": {
- "version": "2.1.9",
- "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-2.1.9.tgz",
- "integrity": "sha512-oBO82rEjsxLNJincVhLhaxxZdEtV0EFHMK5Kmx5sJ6H9L183dHECjiefOAdnqpIgT5eZwT04PoggUnW88vOBNQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@vitest/pretty-format": "2.1.9",
- "magic-string": "^0.30.12",
- "pathe": "^1.1.2"
- },
- "funding": {
- "url": "https://opencollective.com/vitest"
- }
- },
- "node_modules/@vitest/spy": {
- "version": "2.1.9",
- "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-2.1.9.tgz",
- "integrity": "sha512-E1B35FwzXXTs9FHNK6bDszs7mtydNi5MIfUWpceJ8Xbfb1gBMscAnwLbEu+B44ed6W3XjL9/ehLPHR1fkf1KLQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "tinyspy": "^3.0.2"
- },
- "funding": {
- "url": "https://opencollective.com/vitest"
- }
- },
- "node_modules/@vitest/utils": {
- "version": "2.1.9",
- "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-2.1.9.tgz",
- "integrity": "sha512-v0psaMSkNJ3A2NMrUEHFRzJtDPFn+/VWZ5WxImB21T9fjucJRmS7xCS3ppEnARb9y11OAzaD+P2Ps+b+BGX5iQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@vitest/pretty-format": "2.1.9",
- "loupe": "^3.1.2",
- "tinyrainbow": "^1.2.0"
- },
- "funding": {
- "url": "https://opencollective.com/vitest"
- }
- },
- "node_modules/@volar/language-core": {
- "version": "2.4.15",
- "resolved": "https://registry.npmjs.org/@volar/language-core/-/language-core-2.4.15.tgz",
- "integrity": "sha512-3VHw+QZU0ZG9IuQmzT68IyN4hZNd9GchGPhbD9+pa8CVv7rnoOZwo7T8weIbrRmihqy3ATpdfXFnqRrfPVK6CA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@volar/source-map": "2.4.15"
- }
- },
- "node_modules/@volar/source-map": {
- "version": "2.4.15",
- "resolved": "https://registry.npmjs.org/@volar/source-map/-/source-map-2.4.15.tgz",
- "integrity": "sha512-CPbMWlUN6hVZJYGcU/GSoHu4EnCHiLaXI9n8c9la6RaI9W5JHX+NqG+GSQcB0JdC2FIBLdZJwGsfKyBB71VlTg==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/@volar/typescript": {
- "version": "2.4.15",
- "resolved": "https://registry.npmjs.org/@volar/typescript/-/typescript-2.4.15.tgz",
- "integrity": "sha512-2aZ8i0cqPGjXb4BhkMsPYDkkuc2ZQ6yOpqwAuNwUoncELqoy5fRgOQtLR9gB0g902iS0NAkvpIzs27geVyVdPg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@volar/language-core": "2.4.15",
- "path-browserify": "^1.0.1",
- "vscode-uri": "^3.0.8"
- }
- },
- "node_modules/@vue/compiler-core": {
- "version": "3.5.26",
- "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.26.tgz",
- "integrity": "sha512-vXyI5GMfuoBCnv5ucIT7jhHKl55Y477yxP6fc4eUswjP8FG3FFVFd41eNDArR+Uk3QKn2Z85NavjaxLxOC19/w==",
- "license": "MIT",
- "dependencies": {
- "@babel/parser": "^7.28.5",
- "@vue/shared": "3.5.26",
- "entities": "^7.0.0",
- "estree-walker": "^2.0.2",
- "source-map-js": "^1.2.1"
- }
- },
- "node_modules/@vue/compiler-dom": {
- "version": "3.5.26",
- "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.26.tgz",
- "integrity": "sha512-y1Tcd3eXs834QjswshSilCBnKGeQjQXB6PqFn/1nxcQw4pmG42G8lwz+FZPAZAby6gZeHSt/8LMPfZ4Rb+Bd/A==",
- "license": "MIT",
- "dependencies": {
- "@vue/compiler-core": "3.5.26",
- "@vue/shared": "3.5.26"
- }
- },
- "node_modules/@vue/compiler-sfc": {
- "version": "3.5.26",
- "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.26.tgz",
- "integrity": "sha512-egp69qDTSEZcf4bGOSsprUr4xI73wfrY5oRs6GSgXFTiHrWj4Y3X5Ydtip9QMqiCMCPVwLglB9GBxXtTadJ3mA==",
- "license": "MIT",
- "dependencies": {
- "@babel/parser": "^7.28.5",
- "@vue/compiler-core": "3.5.26",
- "@vue/compiler-dom": "3.5.26",
- "@vue/compiler-ssr": "3.5.26",
- "@vue/shared": "3.5.26",
- "estree-walker": "^2.0.2",
- "magic-string": "^0.30.21",
- "postcss": "^8.5.6",
- "source-map-js": "^1.2.1"
- }
- },
- "node_modules/@vue/compiler-ssr": {
- "version": "3.5.26",
- "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.26.tgz",
- "integrity": "sha512-lZT9/Y0nSIRUPVvapFJEVDbEXruZh2IYHMk2zTtEgJSlP5gVOqeWXH54xDKAaFS4rTnDeDBQUYDtxKyoW9FwDw==",
- "license": "MIT",
- "dependencies": {
- "@vue/compiler-dom": "3.5.26",
- "@vue/shared": "3.5.26"
- }
- },
- "node_modules/@vue/compiler-vue2": {
- "version": "2.7.16",
- "resolved": "https://registry.npmjs.org/@vue/compiler-vue2/-/compiler-vue2-2.7.16.tgz",
- "integrity": "sha512-qYC3Psj9S/mfu9uVi5WvNZIzq+xnXMhOwbTFKKDD7b1lhpnn71jXSFdTQ+WsIEk0ONCd7VV2IMm7ONl6tbQ86A==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "de-indent": "^1.0.2",
- "he": "^1.2.0"
- }
- },
- "node_modules/@vue/devtools-api": {
- "version": "6.6.4",
- "resolved": "https://registry.npmjs.org/@vue/devtools-api/-/devtools-api-6.6.4.tgz",
- "integrity": "sha512-sGhTPMuXqZ1rVOk32RylztWkfXTRhuS7vgAKv0zjqk8gbsHkJ7xfFf+jbySxt7tWObEJwyKaHMikV/WGDiQm8g==",
- "license": "MIT"
- },
- "node_modules/@vue/language-core": {
- "version": "2.2.12",
- "resolved": "https://registry.npmjs.org/@vue/language-core/-/language-core-2.2.12.tgz",
- "integrity": "sha512-IsGljWbKGU1MZpBPN+BvPAdr55YPkj2nB/TBNGNC32Vy2qLG25DYu/NBN2vNtZqdRbTRjaoYrahLrToim2NanA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@volar/language-core": "2.4.15",
- "@vue/compiler-dom": "^3.5.0",
- "@vue/compiler-vue2": "^2.7.16",
- "@vue/shared": "^3.5.0",
- "alien-signals": "^1.0.3",
- "minimatch": "^9.0.3",
- "muggle-string": "^0.4.1",
- "path-browserify": "^1.0.1"
- },
- "peerDependencies": {
- "typescript": "*"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/@vue/reactivity": {
- "version": "3.5.26",
- "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.26.tgz",
- "integrity": "sha512-9EnYB1/DIiUYYnzlnUBgwU32NNvLp/nhxLXeWRhHUEeWNTn1ECxX8aGO7RTXeX6PPcxe3LLuNBFoJbV4QZ+CFQ==",
- "license": "MIT",
- "dependencies": {
- "@vue/shared": "3.5.26"
- }
- },
- "node_modules/@vue/runtime-core": {
- "version": "3.5.26",
- "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.26.tgz",
- "integrity": "sha512-xJWM9KH1kd201w5DvMDOwDHYhrdPTrAatn56oB/LRG4plEQeZRQLw0Bpwih9KYoqmzaxF0OKSn6swzYi84e1/Q==",
- "license": "MIT",
- "dependencies": {
- "@vue/reactivity": "3.5.26",
- "@vue/shared": "3.5.26"
- }
- },
- "node_modules/@vue/runtime-dom": {
- "version": "3.5.26",
- "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.26.tgz",
- "integrity": "sha512-XLLd/+4sPC2ZkN/6+V4O4gjJu6kSDbHAChvsyWgm1oGbdSO3efvGYnm25yCjtFm/K7rrSDvSfPDgN1pHgS4VNQ==",
- "license": "MIT",
- "dependencies": {
- "@vue/reactivity": "3.5.26",
- "@vue/runtime-core": "3.5.26",
- "@vue/shared": "3.5.26",
- "csstype": "^3.2.3"
- }
- },
- "node_modules/@vue/server-renderer": {
- "version": "3.5.26",
- "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.26.tgz",
- "integrity": "sha512-TYKLXmrwWKSodyVuO1WAubucd+1XlLg4set0YoV+Hu8Lo79mp/YMwWV5mC5FgtsDxX3qo1ONrxFaTP1OQgy1uA==",
- "license": "MIT",
- "dependencies": {
- "@vue/compiler-ssr": "3.5.26",
- "@vue/shared": "3.5.26"
- },
- "peerDependencies": {
- "vue": "3.5.26"
- }
- },
- "node_modules/@vue/shared": {
- "version": "3.5.26",
- "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.26.tgz",
- "integrity": "sha512-7Z6/y3uFI5PRoKeorTOSXKcDj0MSasfNNltcslbFrPpcw6aXRUALq4IfJlaTRspiWIUOEZbrpM+iQGmCOiWe4A==",
- "license": "MIT"
- },
- "node_modules/@vue/test-utils": {
- "version": "2.4.6",
- "resolved": "https://registry.npmjs.org/@vue/test-utils/-/test-utils-2.4.6.tgz",
- "integrity": "sha512-FMxEjOpYNYiFe0GkaHsnJPXFHxQ6m4t8vI/ElPGpMWxZKpmRvQ33OIrvRXemy6yha03RxhOlQuy+gZMC3CQSow==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "js-beautify": "^1.14.9",
- "vue-component-type-helpers": "^2.0.0"
- }
- },
- "node_modules/@vueuse/core": {
- "version": "10.11.1",
- "resolved": "https://registry.npmjs.org/@vueuse/core/-/core-10.11.1.tgz",
- "integrity": "sha512-guoy26JQktXPcz+0n3GukWIy/JDNKti9v6VEMu6kV2sYBsWuGiTU8OWdg+ADfUbHg3/3DlqySDe7JmdHrktiww==",
- "license": "MIT",
- "dependencies": {
- "@types/web-bluetooth": "^0.0.20",
- "@vueuse/metadata": "10.11.1",
- "@vueuse/shared": "10.11.1",
- "vue-demi": ">=0.14.8"
- },
- "funding": {
- "url": "https://github.com/sponsors/antfu"
- }
- },
- "node_modules/@vueuse/metadata": {
- "version": "10.11.1",
- "resolved": "https://registry.npmjs.org/@vueuse/metadata/-/metadata-10.11.1.tgz",
- "integrity": "sha512-IGa5FXd003Ug1qAZmyE8wF3sJ81xGLSqTqtQ6jaVfkeZ4i5kS2mwQF61yhVqojRnenVew5PldLyRgvdl4YYuSw==",
- "license": "MIT",
- "funding": {
- "url": "https://github.com/sponsors/antfu"
- }
- },
- "node_modules/@vueuse/shared": {
- "version": "10.11.1",
- "resolved": "https://registry.npmjs.org/@vueuse/shared/-/shared-10.11.1.tgz",
- "integrity": "sha512-LHpC8711VFZlDaYUXEBbFBCQ7GS3dVU9mjOhhMhXP6txTV4EhYQg/KGnQuvt/sPAtoUKq7VVUnL6mVtFoL42sA==",
- "license": "MIT",
- "dependencies": {
- "vue-demi": ">=0.14.8"
- },
- "funding": {
- "url": "https://github.com/sponsors/antfu"
- }
- },
- "node_modules/abbrev": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-2.0.0.tgz",
- "integrity": "sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==",
- "dev": true,
- "license": "ISC",
- "engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
- }
- },
- "node_modules/acorn": {
- "version": "8.15.0",
- "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
- "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
- "dev": true,
- "license": "MIT",
- "bin": {
- "acorn": "bin/acorn"
- },
- "engines": {
- "node": ">=0.4.0"
- }
- },
- "node_modules/acorn-jsx": {
- "version": "5.3.2",
- "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz",
- "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==",
- "dev": true,
- "license": "MIT",
- "peerDependencies": {
- "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
- }
- },
- "node_modules/adler-32": {
- "version": "1.3.1",
- "resolved": "https://registry.npmjs.org/adler-32/-/adler-32-1.3.1.tgz",
- "integrity": "sha512-ynZ4w/nUUv5rrsR8UUGoe1VC9hZj6V5hU9Qw1HlMDJGEJw5S7TfTErWTjMys6M7vr0YWcPqs3qAr4ss0nDfP+A==",
- "license": "Apache-2.0",
- "engines": {
- "node": ">=0.8"
- }
- },
- "node_modules/agent-base": {
- "version": "7.1.4",
- "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz",
- "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 14"
- }
- },
- "node_modules/ajv": {
- "version": "6.12.6",
- "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
- "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "fast-deep-equal": "^3.1.1",
- "fast-json-stable-stringify": "^2.0.0",
- "json-schema-traverse": "^0.4.1",
- "uri-js": "^4.2.2"
- },
- "funding": {
- "type": "github",
- "url": "https://github.com/sponsors/epoberezkin"
- }
- },
- "node_modules/alien-signals": {
- "version": "1.0.13",
- "resolved": "https://registry.npmjs.org/alien-signals/-/alien-signals-1.0.13.tgz",
- "integrity": "sha512-OGj9yyTnJEttvzhTUWuscOvtqxq5vrhF7vL9oS0xJ2mK0ItPYP1/y+vCFebfxoEyAz0++1AIwJ5CMr+Fk3nDmg==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/ansi-regex": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
- "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/ansi-styles": {
- "version": "4.3.0",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
- "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
- "license": "MIT",
- "dependencies": {
- "color-convert": "^2.0.1"
- },
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/chalk/ansi-styles?sponsor=1"
- }
- },
- "node_modules/antd-style": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/antd-style/-/antd-style-4.1.0.tgz",
- "integrity": "sha512-vnPBGg0OVlSz90KRYZhxd89aZiOImTiesF+9MQqN8jsLGZUQTjbP04X9jTdEfsztKUuMbBWg/RmB/wHTakbtMQ==",
- "license": "MIT",
- "dependencies": {
- "@ant-design/cssinjs": "^2.0.0",
- "@babel/runtime": "^7.24.1",
- "@emotion/cache": "^11.11.0",
- "@emotion/css": "^11.11.2",
- "@emotion/react": "^11.11.4",
- "@emotion/serialize": "^1.1.3",
- "@emotion/utils": "^1.2.1",
- "use-merge-value": "^1.2.0"
- },
- "peerDependencies": {
- "antd": ">=6.0.0",
- "react": ">=18"
- }
- },
- "node_modules/any-promise": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz",
- "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/anymatch": {
- "version": "3.1.3",
- "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
- "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "normalize-path": "^3.0.0",
- "picomatch": "^2.0.4"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/arg": {
- "version": "5.0.2",
- "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz",
- "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/argparse": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
- "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
- "dev": true,
- "license": "Python-2.0"
- },
- "node_modules/array-union": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz",
- "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/assertion-error": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz",
- "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/asynckit": {
- "version": "0.4.0",
- "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
- "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
- "license": "MIT"
- },
- "node_modules/autoprefixer": {
- "version": "10.4.23",
- "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.23.tgz",
- "integrity": "sha512-YYTXSFulfwytnjAPlw8QHncHJmlvFKtczb8InXaAx9Q0LbfDnfEYDE55omerIJKihhmU61Ft+cAOSzQVaBUmeA==",
- "dev": true,
- "funding": [
- {
- "type": "opencollective",
- "url": "https://opencollective.com/postcss/"
- },
- {
- "type": "tidelift",
- "url": "https://tidelift.com/funding/github/npm/autoprefixer"
- },
- {
- "type": "github",
- "url": "https://github.com/sponsors/ai"
- }
- ],
- "license": "MIT",
- "dependencies": {
- "browserslist": "^4.28.1",
- "caniuse-lite": "^1.0.30001760",
- "fraction.js": "^5.3.4",
- "picocolors": "^1.1.1",
- "postcss-value-parser": "^4.2.0"
- },
- "bin": {
- "autoprefixer": "bin/autoprefixer"
- },
- "engines": {
- "node": "^10 || ^12 || >=14"
- },
- "peerDependencies": {
- "postcss": "^8.1.0"
- }
- },
- "node_modules/axios": {
- "version": "1.13.2",
- "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.2.tgz",
- "integrity": "sha512-VPk9ebNqPcy5lRGuSlKx752IlDatOjT9paPlm8A7yOuW2Fbvp4X3JznJtT4f0GzGLLiWE9W8onz51SqLYwzGaA==",
- "license": "MIT",
- "dependencies": {
- "follow-redirects": "^1.15.6",
- "form-data": "^4.0.4",
- "proxy-from-env": "^1.1.0"
- }
- },
- "node_modules/babel-plugin-macros": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz",
- "integrity": "sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg==",
- "license": "MIT",
- "dependencies": {
- "@babel/runtime": "^7.12.5",
- "cosmiconfig": "^7.0.0",
- "resolve": "^1.19.0"
- },
- "engines": {
- "node": ">=10",
- "npm": ">=6"
- }
- },
- "node_modules/balanced-match": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
- "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/baseline-browser-mapping": {
- "version": "2.9.14",
- "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.14.tgz",
- "integrity": "sha512-B0xUquLkiGLgHhpPBqvl7GWegWBUNuujQ6kXd/r1U38ElPT6Ok8KZ8e+FpUGEc2ZoRQUzq/aUnaKFc/svWUGSg==",
- "dev": true,
- "license": "Apache-2.0",
- "bin": {
- "baseline-browser-mapping": "dist/cli.js"
- }
- },
- "node_modules/binary-extensions": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz",
- "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/boolbase": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz",
- "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==",
- "dev": true,
- "license": "ISC"
- },
- "node_modules/brace-expansion": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
- "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "balanced-match": "^1.0.0"
- }
- },
- "node_modules/braces": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
- "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "fill-range": "^7.1.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/browserslist": {
- "version": "4.28.1",
- "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz",
- "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==",
- "dev": true,
- "funding": [
- {
- "type": "opencollective",
- "url": "https://opencollective.com/browserslist"
- },
- {
- "type": "tidelift",
- "url": "https://tidelift.com/funding/github/npm/browserslist"
- },
- {
- "type": "github",
- "url": "https://github.com/sponsors/ai"
- }
- ],
- "license": "MIT",
- "dependencies": {
- "baseline-browser-mapping": "^2.9.0",
- "caniuse-lite": "^1.0.30001759",
- "electron-to-chromium": "^1.5.263",
- "node-releases": "^2.0.27",
- "update-browserslist-db": "^1.2.0"
- },
- "bin": {
- "browserslist": "cli.js"
- },
- "engines": {
- "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7"
- }
- },
- "node_modules/cac": {
- "version": "6.7.14",
- "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz",
- "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/call-bind-apply-helpers": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
- "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
- "license": "MIT",
- "dependencies": {
- "es-errors": "^1.3.0",
- "function-bind": "^1.1.2"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/callsites": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
- "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==",
- "license": "MIT",
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/camelcase": {
- "version": "5.3.1",
- "resolved": "https://registry.npmmirror.com/camelcase/-/camelcase-5.3.1.tgz",
- "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==",
- "license": "MIT",
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/camelcase-css": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz",
- "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/caniuse-lite": {
- "version": "1.0.30001763",
- "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001763.tgz",
- "integrity": "sha512-mh/dGtq56uN98LlNX9qdbKnzINhX0QzhiWBFEkFfsFO4QyCvL8YegrJAazCwXIeqkIob8BlZPGM3xdnY+sgmvQ==",
- "dev": true,
- "funding": [
- {
- "type": "opencollective",
- "url": "https://opencollective.com/browserslist"
- },
- {
- "type": "tidelift",
- "url": "https://tidelift.com/funding/github/npm/caniuse-lite"
- },
- {
- "type": "github",
- "url": "https://github.com/sponsors/ai"
- }
- ],
- "license": "CC-BY-4.0"
- },
- "node_modules/cfb": {
- "version": "1.2.2",
- "resolved": "https://registry.npmjs.org/cfb/-/cfb-1.2.2.tgz",
- "integrity": "sha512-KfdUZsSOw19/ObEWasvBP/Ac4reZvAGauZhs6S/gqNhXhI7cKwvlH7ulj+dOEYnca4bm4SGo8C1bTAQvnTjgQA==",
- "license": "Apache-2.0",
- "dependencies": {
- "adler-32": "~1.3.0",
- "crc-32": "~1.2.0"
- },
- "engines": {
- "node": ">=0.8"
- }
- },
- "node_modules/chai": {
- "version": "5.3.3",
- "resolved": "https://registry.npmjs.org/chai/-/chai-5.3.3.tgz",
- "integrity": "sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "assertion-error": "^2.0.1",
- "check-error": "^2.1.1",
- "deep-eql": "^5.0.1",
- "loupe": "^3.1.0",
- "pathval": "^2.0.0"
- },
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/chalk": {
- "version": "4.1.2",
- "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
- "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "ansi-styles": "^4.1.0",
- "supports-color": "^7.1.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/chalk/chalk?sponsor=1"
- }
- },
- "node_modules/chart.js": {
- "version": "4.5.1",
- "resolved": "https://registry.npmjs.org/chart.js/-/chart.js-4.5.1.tgz",
- "integrity": "sha512-GIjfiT9dbmHRiYi6Nl2yFCq7kkwdkp1W/lp2J99rX0yo9tgJGn3lKQATztIjb5tVtevcBtIdICNWqlq5+E8/Pw==",
- "license": "MIT",
- "dependencies": {
- "@kurkle/color": "^0.3.0"
- },
- "engines": {
- "pnpm": ">=8"
- }
- },
- "node_modules/check-error": {
- "version": "2.1.3",
- "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.3.tgz",
- "integrity": "sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 16"
- }
- },
- "node_modules/chokidar": {
- "version": "3.6.0",
- "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz",
- "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "anymatch": "~3.1.2",
- "braces": "~3.0.2",
- "glob-parent": "~5.1.2",
- "is-binary-path": "~2.1.0",
- "is-glob": "~4.0.1",
- "normalize-path": "~3.0.0",
- "readdirp": "~3.6.0"
- },
- "engines": {
- "node": ">= 8.10.0"
- },
- "funding": {
- "url": "https://paulmillr.com/funding/"
- },
- "optionalDependencies": {
- "fsevents": "~2.3.2"
- }
- },
- "node_modules/chokidar/node_modules/glob-parent": {
- "version": "5.1.2",
- "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
- "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "is-glob": "^4.0.1"
- },
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/cliui": {
- "version": "6.0.0",
- "resolved": "https://registry.npmmirror.com/cliui/-/cliui-6.0.0.tgz",
- "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==",
- "license": "ISC",
- "dependencies": {
- "string-width": "^4.2.0",
- "strip-ansi": "^6.0.0",
- "wrap-ansi": "^6.2.0"
- }
- },
- "node_modules/cliui/node_modules/emoji-regex": {
- "version": "8.0.0",
- "resolved": "https://registry.npmmirror.com/emoji-regex/-/emoji-regex-8.0.0.tgz",
- "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
- "license": "MIT"
- },
- "node_modules/cliui/node_modules/string-width": {
- "version": "4.2.3",
- "resolved": "https://registry.npmmirror.com/string-width/-/string-width-4.2.3.tgz",
- "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
- "license": "MIT",
- "dependencies": {
- "emoji-regex": "^8.0.0",
- "is-fullwidth-code-point": "^3.0.0",
- "strip-ansi": "^6.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/cliui/node_modules/wrap-ansi": {
- "version": "6.2.0",
- "resolved": "https://registry.npmmirror.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz",
- "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==",
- "license": "MIT",
- "dependencies": {
- "ansi-styles": "^4.0.0",
- "string-width": "^4.1.0",
- "strip-ansi": "^6.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/clsx": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz",
- "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==",
- "license": "MIT",
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/codepage": {
- "version": "1.15.0",
- "resolved": "https://registry.npmjs.org/codepage/-/codepage-1.15.0.tgz",
- "integrity": "sha512-3g6NUTPd/YtuuGrhMnOMRjFc+LJw/bnMp3+0r/Wcz3IXUuCosKRJvMphm5+Q+bvTVGcJJuRvVLuYba+WojaFaA==",
- "license": "Apache-2.0",
- "engines": {
- "node": ">=0.8"
- }
- },
- "node_modules/color-convert": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
- "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
- "license": "MIT",
- "dependencies": {
- "color-name": "~1.1.4"
- },
- "engines": {
- "node": ">=7.0.0"
- }
- },
- "node_modules/color-name": {
- "version": "1.1.4",
- "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
- "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
- "license": "MIT"
- },
- "node_modules/combined-stream": {
- "version": "1.0.8",
- "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
- "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
- "license": "MIT",
- "dependencies": {
- "delayed-stream": "~1.0.0"
- },
- "engines": {
- "node": ">= 0.8"
- }
- },
- "node_modules/commander": {
- "version": "4.1.1",
- "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz",
- "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/concat-map": {
- "version": "0.0.1",
- "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
- "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/config-chain": {
- "version": "1.1.13",
- "resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.13.tgz",
- "integrity": "sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "ini": "^1.3.4",
- "proto-list": "~1.2.1"
- }
- },
- "node_modules/convert-source-map": {
- "version": "1.9.0",
- "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz",
- "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==",
- "license": "MIT"
- },
- "node_modules/cosmiconfig": {
- "version": "7.1.0",
- "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.1.0.tgz",
- "integrity": "sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA==",
- "license": "MIT",
- "dependencies": {
- "@types/parse-json": "^4.0.0",
- "import-fresh": "^3.2.1",
- "parse-json": "^5.0.0",
- "path-type": "^4.0.0",
- "yaml": "^1.10.0"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/crc-32": {
- "version": "1.2.2",
- "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz",
- "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==",
- "license": "Apache-2.0",
- "bin": {
- "crc32": "bin/crc32.njs"
- },
- "engines": {
- "node": ">=0.8"
- }
- },
- "node_modules/cross-spawn": {
- "version": "7.0.6",
- "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
- "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "path-key": "^3.1.0",
- "shebang-command": "^2.0.0",
- "which": "^2.0.1"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/cssesc": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz",
- "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==",
- "dev": true,
- "license": "MIT",
- "bin": {
- "cssesc": "bin/cssesc"
- },
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/cssstyle": {
- "version": "4.6.0",
- "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-4.6.0.tgz",
- "integrity": "sha512-2z+rWdzbbSZv6/rhtvzvqeZQHrBaqgogqt85sqFNbabZOuFbCVFb8kPeEtZjiKkbrm395irpNKiYeFeLiQnFPg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@asamuzakjp/css-color": "^3.2.0",
- "rrweb-cssom": "^0.8.0"
- },
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/cssstyle/node_modules/rrweb-cssom": {
- "version": "0.8.0",
- "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.8.0.tgz",
- "integrity": "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/csstype": {
- "version": "3.2.3",
- "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz",
- "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==",
- "license": "MIT"
- },
- "node_modules/data-urls": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-5.0.0.tgz",
- "integrity": "sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "whatwg-mimetype": "^4.0.0",
- "whatwg-url": "^14.0.0"
- },
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/de-indent": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/de-indent/-/de-indent-1.0.2.tgz",
- "integrity": "sha512-e/1zu3xH5MQryN2zdVaF0OrdNLUbvWxzMbi+iNA6Bky7l1RoP8a2fIbRocyHclXt/arDrrR6lL3TqFD9pMQTsg==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/debug": {
- "version": "4.4.3",
- "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
- "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
- "license": "MIT",
- "dependencies": {
- "ms": "^2.1.3"
- },
- "engines": {
- "node": ">=6.0"
- },
- "peerDependenciesMeta": {
- "supports-color": {
- "optional": true
- }
- }
- },
- "node_modules/decamelize": {
- "version": "1.2.0",
- "resolved": "https://registry.npmmirror.com/decamelize/-/decamelize-1.2.0.tgz",
- "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==",
- "license": "MIT",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/decimal.js": {
- "version": "10.6.0",
- "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.6.0.tgz",
- "integrity": "sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/deep-eql": {
- "version": "5.0.2",
- "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz",
- "integrity": "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/deep-is": {
- "version": "0.1.4",
- "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
- "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/delayed-stream": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
- "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
- "license": "MIT",
- "engines": {
- "node": ">=0.4.0"
- }
- },
- "node_modules/didyoumean": {
- "version": "1.2.2",
- "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz",
- "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==",
- "dev": true,
- "license": "Apache-2.0"
- },
- "node_modules/dijkstrajs": {
- "version": "1.0.3",
- "resolved": "https://registry.npmmirror.com/dijkstrajs/-/dijkstrajs-1.0.3.tgz",
- "integrity": "sha512-qiSlmBq9+BCdCA/L46dw8Uy93mloxsPSbwnm5yrKn2vMPiy8KyAskTF6zuV/j5BMsmOGZDPs7KjU+mjb670kfA==",
- "license": "MIT"
- },
- "node_modules/dir-glob": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz",
- "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "path-type": "^4.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/dlv": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz",
- "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/doctrine": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz",
- "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==",
- "dev": true,
- "license": "Apache-2.0",
- "dependencies": {
- "esutils": "^2.0.2"
- },
- "engines": {
- "node": ">=6.0.0"
- }
- },
- "node_modules/driver.js": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/driver.js/-/driver.js-1.4.0.tgz",
- "integrity": "sha512-Gm64jm6PmcU+si21sQhBrTAM1JvUrR0QhNmjkprNLxohOBzul9+pNHXgQaT9lW84gwg9GMLB3NZGuGolsz5uew==",
- "license": "MIT"
- },
- "node_modules/dunder-proto": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
- "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
- "license": "MIT",
- "dependencies": {
- "call-bind-apply-helpers": "^1.0.1",
- "es-errors": "^1.3.0",
- "gopd": "^1.2.0"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/eastasianwidth": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz",
- "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/editorconfig": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/editorconfig/-/editorconfig-1.0.4.tgz",
- "integrity": "sha512-L9Qe08KWTlqYMVvMcTIvMAdl1cDUubzRNYL+WfA4bLDMHe4nemKkpmYzkznE1FwLKu0EEmy6obgQKzMJrg4x9Q==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@one-ini/wasm": "0.1.1",
- "commander": "^10.0.0",
- "minimatch": "9.0.1",
- "semver": "^7.5.3"
- },
- "bin": {
- "editorconfig": "bin/editorconfig"
- },
- "engines": {
- "node": ">=14"
- }
- },
- "node_modules/editorconfig/node_modules/commander": {
- "version": "10.0.1",
- "resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz",
- "integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=14"
- }
- },
- "node_modules/editorconfig/node_modules/minimatch": {
- "version": "9.0.1",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.1.tgz",
- "integrity": "sha512-0jWhJpD/MdhPXwPuiRkCbfYfSKp2qnn2eOc279qI7f+osl/l+prKSrvhg157zSYvx/1nmgn2NqdT6k2Z7zSH9w==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "brace-expansion": "^2.0.1"
- },
- "engines": {
- "node": ">=16 || 14 >=14.17"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/electron-to-chromium": {
- "version": "1.5.267",
- "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.267.tgz",
- "integrity": "sha512-0Drusm6MVRXSOJpGbaSVgcQsuB4hEkMpHXaVstcPmhu5LIedxs1xNK/nIxmQIU/RPC0+1/o0AVZfBTkTNJOdUw==",
- "dev": true,
- "license": "ISC"
- },
- "node_modules/emoji-regex": {
- "version": "9.2.2",
- "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
- "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/entities": {
- "version": "7.0.0",
- "resolved": "https://registry.npmjs.org/entities/-/entities-7.0.0.tgz",
- "integrity": "sha512-FDWG5cmEYf2Z00IkYRhbFrwIwvdFKH07uV8dvNy0omp/Qb1xcyCWp2UDtcwJF4QZZvk0sLudP6/hAu42TaqVhQ==",
- "license": "BSD-2-Clause",
- "engines": {
- "node": ">=0.12"
- },
- "funding": {
- "url": "https://github.com/fb55/entities?sponsor=1"
- }
- },
- "node_modules/error-ex": {
- "version": "1.3.4",
- "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz",
- "integrity": "sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==",
- "license": "MIT",
- "dependencies": {
- "is-arrayish": "^0.2.1"
- }
- },
- "node_modules/es-define-property": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
- "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/es-errors": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
- "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/es-module-lexer": {
- "version": "1.7.0",
- "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz",
- "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/es-object-atoms": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
- "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
- "license": "MIT",
- "dependencies": {
- "es-errors": "^1.3.0"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/es-set-tostringtag": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
- "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
- "license": "MIT",
- "dependencies": {
- "es-errors": "^1.3.0",
- "get-intrinsic": "^1.2.6",
- "has-tostringtag": "^1.0.2",
- "hasown": "^2.0.2"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/esbuild": {
- "version": "0.21.5",
- "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz",
- "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==",
- "dev": true,
- "hasInstallScript": true,
- "license": "MIT",
- "bin": {
- "esbuild": "bin/esbuild"
- },
- "engines": {
- "node": ">=12"
- },
- "optionalDependencies": {
- "@esbuild/aix-ppc64": "0.21.5",
- "@esbuild/android-arm": "0.21.5",
- "@esbuild/android-arm64": "0.21.5",
- "@esbuild/android-x64": "0.21.5",
- "@esbuild/darwin-arm64": "0.21.5",
- "@esbuild/darwin-x64": "0.21.5",
- "@esbuild/freebsd-arm64": "0.21.5",
- "@esbuild/freebsd-x64": "0.21.5",
- "@esbuild/linux-arm": "0.21.5",
- "@esbuild/linux-arm64": "0.21.5",
- "@esbuild/linux-ia32": "0.21.5",
- "@esbuild/linux-loong64": "0.21.5",
- "@esbuild/linux-mips64el": "0.21.5",
- "@esbuild/linux-ppc64": "0.21.5",
- "@esbuild/linux-riscv64": "0.21.5",
- "@esbuild/linux-s390x": "0.21.5",
- "@esbuild/linux-x64": "0.21.5",
- "@esbuild/netbsd-x64": "0.21.5",
- "@esbuild/openbsd-x64": "0.21.5",
- "@esbuild/sunos-x64": "0.21.5",
- "@esbuild/win32-arm64": "0.21.5",
- "@esbuild/win32-ia32": "0.21.5",
- "@esbuild/win32-x64": "0.21.5"
- }
- },
- "node_modules/escalade": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
- "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/escape-string-regexp": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
- "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
- "license": "MIT",
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/eslint": {
- "version": "8.57.1",
- "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz",
- "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==",
- "deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@eslint-community/eslint-utils": "^4.2.0",
- "@eslint-community/regexpp": "^4.6.1",
- "@eslint/eslintrc": "^2.1.4",
- "@eslint/js": "8.57.1",
- "@humanwhocodes/config-array": "^0.13.0",
- "@humanwhocodes/module-importer": "^1.0.1",
- "@nodelib/fs.walk": "^1.2.8",
- "@ungap/structured-clone": "^1.2.0",
- "ajv": "^6.12.4",
- "chalk": "^4.0.0",
- "cross-spawn": "^7.0.2",
- "debug": "^4.3.2",
- "doctrine": "^3.0.0",
- "escape-string-regexp": "^4.0.0",
- "eslint-scope": "^7.2.2",
- "eslint-visitor-keys": "^3.4.3",
- "espree": "^9.6.1",
- "esquery": "^1.4.2",
- "esutils": "^2.0.2",
- "fast-deep-equal": "^3.1.3",
- "file-entry-cache": "^6.0.1",
- "find-up": "^5.0.0",
- "glob-parent": "^6.0.2",
- "globals": "^13.19.0",
- "graphemer": "^1.4.0",
- "ignore": "^5.2.0",
- "imurmurhash": "^0.1.4",
- "is-glob": "^4.0.0",
- "is-path-inside": "^3.0.3",
- "js-yaml": "^4.1.0",
- "json-stable-stringify-without-jsonify": "^1.0.1",
- "levn": "^0.4.1",
- "lodash.merge": "^4.6.2",
- "minimatch": "^3.1.2",
- "natural-compare": "^1.4.0",
- "optionator": "^0.9.3",
- "strip-ansi": "^6.0.1",
- "text-table": "^0.2.0"
- },
- "bin": {
- "eslint": "bin/eslint.js"
- },
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
- "node_modules/eslint-plugin-vue": {
- "version": "9.33.0",
- "resolved": "https://registry.npmjs.org/eslint-plugin-vue/-/eslint-plugin-vue-9.33.0.tgz",
- "integrity": "sha512-174lJKuNsuDIlLpjeXc5E2Tss8P44uIimAfGD0b90k0NoirJqpG7stLuU9Vp/9ioTOrQdWVREc4mRd1BD+CvGw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@eslint-community/eslint-utils": "^4.4.0",
- "globals": "^13.24.0",
- "natural-compare": "^1.4.0",
- "nth-check": "^2.1.1",
- "postcss-selector-parser": "^6.0.15",
- "semver": "^7.6.3",
- "vue-eslint-parser": "^9.4.3",
- "xml-name-validator": "^4.0.0"
- },
- "engines": {
- "node": "^14.17.0 || >=16.0.0"
- },
- "peerDependencies": {
- "eslint": "^6.2.0 || ^7.0.0 || ^8.0.0 || ^9.0.0"
- }
- },
- "node_modules/eslint-scope": {
- "version": "7.2.2",
- "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz",
- "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==",
- "dev": true,
- "license": "BSD-2-Clause",
- "dependencies": {
- "esrecurse": "^4.3.0",
- "estraverse": "^5.2.0"
- },
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
- "node_modules/eslint-visitor-keys": {
- "version": "3.4.3",
- "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz",
- "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==",
- "dev": true,
- "license": "Apache-2.0",
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
- "node_modules/eslint/node_modules/brace-expansion": {
- "version": "1.1.12",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
- "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
- }
- },
- "node_modules/eslint/node_modules/minimatch": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
- "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "brace-expansion": "^1.1.7"
- },
- "engines": {
- "node": "*"
- }
- },
- "node_modules/espree": {
- "version": "9.6.1",
- "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz",
- "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==",
- "dev": true,
- "license": "BSD-2-Clause",
- "dependencies": {
- "acorn": "^8.9.0",
- "acorn-jsx": "^5.3.2",
- "eslint-visitor-keys": "^3.4.1"
- },
- "engines": {
- "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/eslint"
- }
- },
- "node_modules/esquery": {
- "version": "1.7.0",
- "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.7.0.tgz",
- "integrity": "sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==",
- "dev": true,
- "license": "BSD-3-Clause",
- "dependencies": {
- "estraverse": "^5.1.0"
- },
- "engines": {
- "node": ">=0.10"
- }
- },
- "node_modules/esrecurse": {
- "version": "4.3.0",
- "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz",
- "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
- "dev": true,
- "license": "BSD-2-Clause",
- "dependencies": {
- "estraverse": "^5.2.0"
- },
- "engines": {
- "node": ">=4.0"
- }
- },
- "node_modules/estraverse": {
- "version": "5.3.0",
- "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz",
- "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==",
- "dev": true,
- "license": "BSD-2-Clause",
- "engines": {
- "node": ">=4.0"
- }
- },
- "node_modules/estree-walker": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz",
- "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==",
- "license": "MIT"
- },
- "node_modules/esutils": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
- "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
- "dev": true,
- "license": "BSD-2-Clause",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/expect-type": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz",
- "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==",
- "dev": true,
- "license": "Apache-2.0",
- "engines": {
- "node": ">=12.0.0"
- }
- },
- "node_modules/fast-deep-equal": {
- "version": "3.1.3",
- "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
- "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/fast-glob": {
- "version": "3.3.3",
- "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz",
- "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@nodelib/fs.stat": "^2.0.2",
- "@nodelib/fs.walk": "^1.2.3",
- "glob-parent": "^5.1.2",
- "merge2": "^1.3.0",
- "micromatch": "^4.0.8"
- },
- "engines": {
- "node": ">=8.6.0"
- }
- },
- "node_modules/fast-glob/node_modules/glob-parent": {
- "version": "5.1.2",
- "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
- "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "is-glob": "^4.0.1"
- },
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/fast-json-stable-stringify": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
- "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/fast-levenshtein": {
- "version": "2.0.6",
- "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
- "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/fastq": {
- "version": "1.20.1",
- "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.20.1.tgz",
- "integrity": "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "reusify": "^1.0.4"
- }
- },
- "node_modules/fdir": {
- "version": "6.5.0",
- "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz",
- "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=12.0.0"
- },
- "peerDependencies": {
- "picomatch": "^3 || ^4"
- },
- "peerDependenciesMeta": {
- "picomatch": {
- "optional": true
- }
- }
- },
- "node_modules/file-entry-cache": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
- "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "flat-cache": "^3.0.4"
- },
- "engines": {
- "node": "^10.12.0 || >=12.0.0"
- }
- },
- "node_modules/file-saver": {
- "version": "2.0.5",
- "resolved": "https://registry.npmjs.org/file-saver/-/file-saver-2.0.5.tgz",
- "integrity": "sha512-P9bmyZ3h/PRG+Nzga+rbdI4OEpNDzAVyy74uVO9ATgzLK6VtAsYybF/+TOCvrc0MO793d6+42lLyZTw7/ArVzA==",
- "license": "MIT"
- },
- "node_modules/fill-range": {
- "version": "7.1.1",
- "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
- "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "to-regex-range": "^5.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/find-root": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/find-root/-/find-root-1.1.0.tgz",
- "integrity": "sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng==",
- "license": "MIT"
- },
- "node_modules/find-up": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
- "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "locate-path": "^6.0.0",
- "path-exists": "^4.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/flat-cache": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz",
- "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "flatted": "^3.2.9",
- "keyv": "^4.5.3",
- "rimraf": "^3.0.2"
- },
- "engines": {
- "node": "^10.12.0 || >=12.0.0"
- }
- },
- "node_modules/flatted": {
- "version": "3.3.3",
- "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz",
- "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==",
- "dev": true,
- "license": "ISC"
- },
- "node_modules/follow-redirects": {
- "version": "1.15.11",
- "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz",
- "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==",
- "funding": [
- {
- "type": "individual",
- "url": "https://github.com/sponsors/RubenVerborgh"
- }
- ],
- "license": "MIT",
- "engines": {
- "node": ">=4.0"
- },
- "peerDependenciesMeta": {
- "debug": {
- "optional": true
- }
- }
- },
- "node_modules/foreground-child": {
- "version": "3.3.1",
- "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz",
- "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "cross-spawn": "^7.0.6",
- "signal-exit": "^4.0.1"
- },
- "engines": {
- "node": ">=14"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/form-data": {
- "version": "4.0.5",
- "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz",
- "integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==",
- "license": "MIT",
- "dependencies": {
- "asynckit": "^0.4.0",
- "combined-stream": "^1.0.8",
- "es-set-tostringtag": "^2.1.0",
- "hasown": "^2.0.2",
- "mime-types": "^2.1.12"
- },
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/frac": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/frac/-/frac-1.1.2.tgz",
- "integrity": "sha512-w/XBfkibaTl3YDqASwfDUqkna4Z2p9cFSr1aHDt0WoMTECnRfBOv2WArlZILlqgWlmdIlALXGpM2AOhEk5W3IA==",
- "license": "Apache-2.0",
- "engines": {
- "node": ">=0.8"
- }
- },
- "node_modules/fraction.js": {
- "version": "5.3.4",
- "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.4.tgz",
- "integrity": "sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": "*"
- },
- "funding": {
- "type": "github",
- "url": "https://github.com/sponsors/rawify"
- }
- },
- "node_modules/fs.realpath": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
- "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==",
- "dev": true,
- "license": "ISC"
- },
- "node_modules/fsevents": {
- "version": "2.3.3",
- "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
- "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
- "dev": true,
- "hasInstallScript": true,
- "license": "MIT",
- "optional": true,
- "os": [
- "darwin"
- ],
- "engines": {
- "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
- }
- },
- "node_modules/function-bind": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
- "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
- "license": "MIT",
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/get-caller-file": {
- "version": "2.0.5",
- "resolved": "https://registry.npmmirror.com/get-caller-file/-/get-caller-file-2.0.5.tgz",
- "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
- "license": "ISC",
- "engines": {
- "node": "6.* || 8.* || >= 10.*"
- }
- },
- "node_modules/get-intrinsic": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
- "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
- "license": "MIT",
- "dependencies": {
- "call-bind-apply-helpers": "^1.0.2",
- "es-define-property": "^1.0.1",
- "es-errors": "^1.3.0",
- "es-object-atoms": "^1.1.1",
- "function-bind": "^1.1.2",
- "get-proto": "^1.0.1",
- "gopd": "^1.2.0",
- "has-symbols": "^1.1.0",
- "hasown": "^2.0.2",
- "math-intrinsics": "^1.1.0"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/get-proto": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
- "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
- "license": "MIT",
- "dependencies": {
- "dunder-proto": "^1.0.1",
- "es-object-atoms": "^1.0.0"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/glob": {
- "version": "7.2.3",
- "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
- "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
- "deprecated": "Glob versions prior to v9 are no longer supported",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "fs.realpath": "^1.0.0",
- "inflight": "^1.0.4",
- "inherits": "2",
- "minimatch": "^3.1.1",
- "once": "^1.3.0",
- "path-is-absolute": "^1.0.0"
- },
- "engines": {
- "node": "*"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/glob-parent": {
- "version": "6.0.2",
- "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz",
- "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "is-glob": "^4.0.3"
- },
- "engines": {
- "node": ">=10.13.0"
- }
- },
- "node_modules/glob/node_modules/brace-expansion": {
- "version": "1.1.12",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
- "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
- }
- },
- "node_modules/glob/node_modules/minimatch": {
- "version": "3.1.2",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
- "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "brace-expansion": "^1.1.7"
- },
- "engines": {
- "node": "*"
- }
- },
- "node_modules/globals": {
- "version": "13.24.0",
- "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz",
- "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "type-fest": "^0.20.2"
- },
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/globby": {
- "version": "11.1.0",
- "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz",
- "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "array-union": "^2.1.0",
- "dir-glob": "^3.0.1",
- "fast-glob": "^3.2.9",
- "ignore": "^5.2.0",
- "merge2": "^1.4.1",
- "slash": "^3.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/gopd": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
- "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/graphemer": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz",
- "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/has-flag": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
- "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/has-symbols": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
- "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/has-tostringtag": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
- "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
- "license": "MIT",
- "dependencies": {
- "has-symbols": "^1.0.3"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/hasown": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
- "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
- "license": "MIT",
- "dependencies": {
- "function-bind": "^1.1.2"
- },
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/he": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz",
- "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==",
- "dev": true,
- "license": "MIT",
- "bin": {
- "he": "bin/he"
- }
- },
- "node_modules/hoist-non-react-statics": {
- "version": "3.3.2",
- "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz",
- "integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==",
- "license": "BSD-3-Clause",
- "dependencies": {
- "react-is": "^16.7.0"
- }
- },
- "node_modules/hoist-non-react-statics/node_modules/react-is": {
- "version": "16.13.1",
- "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz",
- "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==",
- "license": "MIT"
- },
- "node_modules/html-encoding-sniffer": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz",
- "integrity": "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "whatwg-encoding": "^3.1.1"
- },
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/html-escaper": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz",
- "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/http-proxy-agent": {
- "version": "7.0.2",
- "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz",
- "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "agent-base": "^7.1.0",
- "debug": "^4.3.4"
- },
- "engines": {
- "node": ">= 14"
- }
- },
- "node_modules/https-proxy-agent": {
- "version": "7.0.6",
- "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
- "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "agent-base": "^7.1.2",
- "debug": "4"
- },
- "engines": {
- "node": ">= 14"
- }
- },
- "node_modules/iconv-lite": {
- "version": "0.6.3",
- "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",
- "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "safer-buffer": ">= 2.1.2 < 3.0.0"
- },
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/ignore": {
- "version": "5.3.2",
- "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz",
- "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 4"
- }
- },
- "node_modules/import-fresh": {
- "version": "3.3.1",
- "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz",
- "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==",
- "license": "MIT",
- "dependencies": {
- "parent-module": "^1.0.0",
- "resolve-from": "^4.0.0"
- },
- "engines": {
- "node": ">=6"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/imurmurhash": {
- "version": "0.1.4",
- "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
- "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=0.8.19"
- }
- },
- "node_modules/inflight": {
- "version": "1.0.6",
- "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
- "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
- "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "once": "^1.3.0",
- "wrappy": "1"
- }
- },
- "node_modules/inherits": {
- "version": "2.0.4",
- "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
- "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
- "dev": true,
- "license": "ISC"
- },
- "node_modules/ini": {
- "version": "1.3.8",
- "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
- "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==",
- "dev": true,
- "license": "ISC"
- },
- "node_modules/is-arrayish": {
- "version": "0.2.1",
- "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz",
- "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==",
- "license": "MIT"
- },
- "node_modules/is-binary-path": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
- "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "binary-extensions": "^2.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/is-core-module": {
- "version": "2.16.1",
- "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz",
- "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==",
- "license": "MIT",
- "dependencies": {
- "hasown": "^2.0.2"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/is-extglob": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
- "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/is-fullwidth-code-point": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
- "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/is-glob": {
- "version": "4.0.3",
- "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
- "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "is-extglob": "^2.1.1"
- },
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/is-mobile": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/is-mobile/-/is-mobile-5.0.0.tgz",
- "integrity": "sha512-Tz/yndySvLAEXh+Uk8liFCxOwVH6YutuR74utvOcu7I9Di+DwM0mtdPVZNaVvvBUM2OXxne/NhOs1zAO7riusQ==",
- "license": "MIT"
- },
- "node_modules/is-number": {
- "version": "7.0.0",
- "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
- "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=0.12.0"
- }
- },
- "node_modules/is-path-inside": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz",
- "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/is-potential-custom-element-name": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz",
- "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/isexe": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
- "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
- "dev": true,
- "license": "ISC"
- },
- "node_modules/istanbul-lib-coverage": {
- "version": "3.2.2",
- "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz",
- "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==",
- "dev": true,
- "license": "BSD-3-Clause",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/istanbul-lib-report": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz",
- "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==",
- "dev": true,
- "license": "BSD-3-Clause",
- "dependencies": {
- "istanbul-lib-coverage": "^3.0.0",
- "make-dir": "^4.0.0",
- "supports-color": "^7.1.0"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/istanbul-lib-source-maps": {
- "version": "5.0.6",
- "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-5.0.6.tgz",
- "integrity": "sha512-yg2d+Em4KizZC5niWhQaIomgf5WlL4vOOjZ5xGCmF8SnPE/mDWWXgvRExdcpCgh9lLRRa1/fSYp2ymmbJ1pI+A==",
- "dev": true,
- "license": "BSD-3-Clause",
- "dependencies": {
- "@jridgewell/trace-mapping": "^0.3.23",
- "debug": "^4.1.1",
- "istanbul-lib-coverage": "^3.0.0"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/istanbul-reports": {
- "version": "3.2.0",
- "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.2.0.tgz",
- "integrity": "sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==",
- "dev": true,
- "license": "BSD-3-Clause",
- "dependencies": {
- "html-escaper": "^2.0.0",
- "istanbul-lib-report": "^3.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/jackspeak": {
- "version": "3.4.3",
- "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz",
- "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==",
- "dev": true,
- "license": "BlueOak-1.0.0",
- "dependencies": {
- "@isaacs/cliui": "^8.0.2"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- },
- "optionalDependencies": {
- "@pkgjs/parseargs": "^0.11.0"
- }
- },
- "node_modules/jiti": {
- "version": "1.21.7",
- "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.7.tgz",
- "integrity": "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==",
- "dev": true,
- "license": "MIT",
- "bin": {
- "jiti": "bin/jiti.js"
- }
- },
- "node_modules/js-beautify": {
- "version": "1.15.4",
- "resolved": "https://registry.npmjs.org/js-beautify/-/js-beautify-1.15.4.tgz",
- "integrity": "sha512-9/KXeZUKKJwqCXUdBxFJ3vPh467OCckSBmYDwSK/EtV090K+iMJ7zx2S3HLVDIWFQdqMIsZWbnaGiba18aWhaA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "config-chain": "^1.1.13",
- "editorconfig": "^1.0.4",
- "glob": "^10.4.2",
- "js-cookie": "^3.0.5",
- "nopt": "^7.2.1"
- },
- "bin": {
- "css-beautify": "js/bin/css-beautify.js",
- "html-beautify": "js/bin/html-beautify.js",
- "js-beautify": "js/bin/js-beautify.js"
- },
- "engines": {
- "node": ">=14"
- }
- },
- "node_modules/js-beautify/node_modules/glob": {
- "version": "10.5.0",
- "resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz",
- "integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "foreground-child": "^3.1.0",
- "jackspeak": "^3.1.2",
- "minimatch": "^9.0.4",
- "minipass": "^7.1.2",
- "package-json-from-dist": "^1.0.0",
- "path-scurry": "^1.11.1"
- },
- "bin": {
- "glob": "dist/esm/bin.mjs"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/js-cookie": {
- "version": "3.0.5",
- "resolved": "https://registry.npmjs.org/js-cookie/-/js-cookie-3.0.5.tgz",
- "integrity": "sha512-cEiJEAEoIbWfCZYKWhVwFuvPX1gETRYPw6LlaTKoxD3s2AkXzkCjnp6h0V77ozyqj0jakteJ4YqDJT830+lVGw==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=14"
- }
- },
- "node_modules/js-tokens": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
- "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
- "license": "MIT"
- },
- "node_modules/js-yaml": {
- "version": "4.1.1",
- "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz",
- "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "argparse": "^2.0.1"
- },
- "bin": {
- "js-yaml": "bin/js-yaml.js"
- }
- },
- "node_modules/jsdom": {
- "version": "24.1.3",
- "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-24.1.3.tgz",
- "integrity": "sha512-MyL55p3Ut3cXbeBEG7Hcv0mVM8pp8PBNWxRqchZnSfAiES1v1mRnMeFfaHWIPULpwsYfvO+ZmMZz5tGCnjzDUQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "cssstyle": "^4.0.1",
- "data-urls": "^5.0.0",
- "decimal.js": "^10.4.3",
- "form-data": "^4.0.0",
- "html-encoding-sniffer": "^4.0.0",
- "http-proxy-agent": "^7.0.2",
- "https-proxy-agent": "^7.0.5",
- "is-potential-custom-element-name": "^1.0.1",
- "nwsapi": "^2.2.12",
- "parse5": "^7.1.2",
- "rrweb-cssom": "^0.7.1",
- "saxes": "^6.0.0",
- "symbol-tree": "^3.2.4",
- "tough-cookie": "^4.1.4",
- "w3c-xmlserializer": "^5.0.0",
- "webidl-conversions": "^7.0.0",
- "whatwg-encoding": "^3.1.1",
- "whatwg-mimetype": "^4.0.0",
- "whatwg-url": "^14.0.0",
- "ws": "^8.18.0",
- "xml-name-validator": "^5.0.0"
- },
- "engines": {
- "node": ">=18"
- },
- "peerDependencies": {
- "canvas": "^2.11.2"
- },
- "peerDependenciesMeta": {
- "canvas": {
- "optional": true
- }
- }
- },
- "node_modules/jsdom/node_modules/xml-name-validator": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-5.0.0.tgz",
- "integrity": "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==",
- "dev": true,
- "license": "Apache-2.0",
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/jsesc": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz",
- "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==",
- "license": "MIT",
- "bin": {
- "jsesc": "bin/jsesc"
- },
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/json-buffer": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz",
- "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/json-parse-even-better-errors": {
- "version": "2.3.1",
- "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz",
- "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==",
- "license": "MIT"
- },
- "node_modules/json-schema-traverse": {
- "version": "0.4.1",
- "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
- "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/json-stable-stringify-without-jsonify": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz",
- "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/keyv": {
- "version": "4.5.4",
- "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz",
- "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "json-buffer": "3.0.1"
- }
- },
- "node_modules/levn": {
- "version": "0.4.1",
- "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz",
- "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "prelude-ls": "^1.2.1",
- "type-check": "~0.4.0"
- },
- "engines": {
- "node": ">= 0.8.0"
- }
- },
- "node_modules/lilconfig": {
- "version": "3.1.3",
- "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz",
- "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=14"
- },
- "funding": {
- "url": "https://github.com/sponsors/antonk52"
- }
- },
- "node_modules/lines-and-columns": {
- "version": "1.2.4",
- "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz",
- "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==",
- "license": "MIT"
- },
- "node_modules/locate-path": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
- "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "p-locate": "^5.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/lodash": {
- "version": "4.17.21",
- "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
- "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/lodash.merge": {
- "version": "4.6.2",
- "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
- "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/loupe": {
- "version": "3.2.1",
- "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.2.1.tgz",
- "integrity": "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/lru-cache": {
- "version": "10.4.3",
- "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
- "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
- "dev": true,
- "license": "ISC"
- },
- "node_modules/lucide-react": {
- "version": "0.469.0",
- "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.469.0.tgz",
- "integrity": "sha512-28vvUnnKQ/dBwiCQtwJw7QauYnE7yd2Cyp4tTTJpvglX4EMpbflcdBgrgToX2j71B3YvugK/NH3BGUk+E/p/Fw==",
- "license": "ISC",
- "peerDependencies": {
- "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0"
- }
- },
- "node_modules/magic-string": {
- "version": "0.30.21",
- "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz",
- "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==",
- "license": "MIT",
- "dependencies": {
- "@jridgewell/sourcemap-codec": "^1.5.5"
- }
- },
- "node_modules/magicast": {
- "version": "0.3.5",
- "resolved": "https://registry.npmjs.org/magicast/-/magicast-0.3.5.tgz",
- "integrity": "sha512-L0WhttDl+2BOsybvEOLK7fW3UA0OQ0IQ2d6Zl2x/a6vVRs3bAY0ECOSHHeL5jD+SbOpOCUEi0y1DgHEn9Qn1AQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@babel/parser": "^7.25.4",
- "@babel/types": "^7.25.4",
- "source-map-js": "^1.2.0"
- }
- },
- "node_modules/make-dir": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz",
- "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "semver": "^7.5.3"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/math-intrinsics": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
- "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.4"
- }
- },
- "node_modules/merge2": {
- "version": "1.4.1",
- "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz",
- "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/micromatch": {
- "version": "4.0.8",
- "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
- "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "braces": "^3.0.3",
- "picomatch": "^2.3.1"
- },
- "engines": {
- "node": ">=8.6"
- }
- },
- "node_modules/mime-db": {
- "version": "1.52.0",
- "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
- "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/mime-types": {
- "version": "2.1.35",
- "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
- "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
- "license": "MIT",
- "dependencies": {
- "mime-db": "1.52.0"
- },
- "engines": {
- "node": ">= 0.6"
- }
- },
- "node_modules/minimatch": {
- "version": "9.0.5",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
- "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "brace-expansion": "^2.0.1"
- },
- "engines": {
- "node": ">=16 || 14 >=14.17"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/minipass": {
- "version": "7.1.2",
- "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz",
- "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==",
- "dev": true,
- "license": "ISC",
- "engines": {
- "node": ">=16 || 14 >=14.17"
- }
- },
- "node_modules/ms": {
- "version": "2.1.3",
- "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
- "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
- "license": "MIT"
- },
- "node_modules/muggle-string": {
- "version": "0.4.1",
- "resolved": "https://registry.npmjs.org/muggle-string/-/muggle-string-0.4.1.tgz",
- "integrity": "sha512-VNTrAak/KhO2i8dqqnqnAHOa3cYBwXEZe9h+D5h/1ZqFSTEFHdM65lR7RoIqq3tBBYavsOXV84NoHXZ0AkPyqQ==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/mz": {
- "version": "2.7.0",
- "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz",
- "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "any-promise": "^1.0.0",
- "object-assign": "^4.0.1",
- "thenify-all": "^1.0.0"
- }
- },
- "node_modules/nanoid": {
- "version": "3.3.11",
- "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz",
- "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==",
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/ai"
- }
- ],
- "license": "MIT",
- "bin": {
- "nanoid": "bin/nanoid.cjs"
- },
- "engines": {
- "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
- }
- },
- "node_modules/natural-compare": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",
- "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/node-releases": {
- "version": "2.0.27",
- "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz",
- "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/nopt": {
- "version": "7.2.1",
- "resolved": "https://registry.npmjs.org/nopt/-/nopt-7.2.1.tgz",
- "integrity": "sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "abbrev": "^2.0.0"
- },
- "bin": {
- "nopt": "bin/nopt.js"
- },
- "engines": {
- "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
- }
- },
- "node_modules/normalize-path": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
- "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/npm-run-path": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-6.0.0.tgz",
- "integrity": "sha512-9qny7Z9DsQU8Ou39ERsPU4OZQlSTP47ShQzuKZ6PRXpYLtIFgl/DEBYEXKlvcEa+9tHVcK8CF81Y2V72qaZhWA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "path-key": "^4.0.0",
- "unicorn-magic": "^0.3.0"
- },
- "engines": {
- "node": ">=18"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/npm-run-path/node_modules/path-key": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz",
- "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/nth-check": {
- "version": "2.1.1",
- "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz",
- "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==",
- "dev": true,
- "license": "BSD-2-Clause",
- "dependencies": {
- "boolbase": "^1.0.0"
- },
- "funding": {
- "url": "https://github.com/fb55/nth-check?sponsor=1"
- }
- },
- "node_modules/nwsapi": {
- "version": "2.2.23",
- "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.23.tgz",
- "integrity": "sha512-7wfH4sLbt4M0gCDzGE6vzQBo0bfTKjU7Sfpqy/7gs1qBfYz2vEJH6vXcBKpO3+6Yu1telwd0t9HpyOoLEQQbIQ==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/object-assign": {
- "version": "4.1.1",
- "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
- "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/object-hash": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz",
- "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/once": {
- "version": "1.4.0",
- "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
- "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "wrappy": "1"
- }
- },
- "node_modules/optionator": {
- "version": "0.9.4",
- "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz",
- "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "deep-is": "^0.1.3",
- "fast-levenshtein": "^2.0.6",
- "levn": "^0.4.1",
- "prelude-ls": "^1.2.1",
- "type-check": "^0.4.0",
- "word-wrap": "^1.2.5"
- },
- "engines": {
- "node": ">= 0.8.0"
- }
- },
- "node_modules/p-limit": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
- "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "yocto-queue": "^0.1.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/p-locate": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
- "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "p-limit": "^3.0.2"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/p-try": {
- "version": "2.2.0",
- "resolved": "https://registry.npmmirror.com/p-try/-/p-try-2.2.0.tgz",
- "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==",
- "license": "MIT",
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/package-json-from-dist": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz",
- "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==",
- "dev": true,
- "license": "BlueOak-1.0.0"
- },
- "node_modules/parent-module": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
- "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==",
- "license": "MIT",
- "dependencies": {
- "callsites": "^3.0.0"
- },
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/parse-json": {
- "version": "5.2.0",
- "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz",
- "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==",
- "license": "MIT",
- "dependencies": {
- "@babel/code-frame": "^7.0.0",
- "error-ex": "^1.3.1",
- "json-parse-even-better-errors": "^2.3.0",
- "lines-and-columns": "^1.1.6"
- },
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/parse5": {
- "version": "7.3.0",
- "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz",
- "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "entities": "^6.0.0"
- },
- "funding": {
- "url": "https://github.com/inikulin/parse5?sponsor=1"
- }
- },
- "node_modules/parse5/node_modules/entities": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz",
- "integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==",
- "dev": true,
- "license": "BSD-2-Clause",
- "engines": {
- "node": ">=0.12"
- },
- "funding": {
- "url": "https://github.com/fb55/entities?sponsor=1"
- }
- },
- "node_modules/path-browserify": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz",
- "integrity": "sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/path-exists": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
- "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/path-is-absolute": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
- "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/path-key": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
- "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/path-parse": {
- "version": "1.0.7",
- "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
- "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
- "license": "MIT"
- },
- "node_modules/path-scurry": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
- "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==",
- "dev": true,
- "license": "BlueOak-1.0.0",
- "dependencies": {
- "lru-cache": "^10.2.0",
- "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
- },
- "engines": {
- "node": ">=16 || 14 >=14.18"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/path-type": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz",
- "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==",
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/pathe": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/pathe/-/pathe-1.1.2.tgz",
- "integrity": "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/pathval": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.1.tgz",
- "integrity": "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 14.16"
- }
- },
- "node_modules/picocolors": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
- "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
- "license": "ISC"
- },
- "node_modules/picomatch": {
- "version": "2.3.1",
- "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
- "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8.6"
- },
- "funding": {
- "url": "https://github.com/sponsors/jonschlinkert"
- }
- },
- "node_modules/pify": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz",
- "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/pinia": {
- "version": "2.3.1",
- "resolved": "https://registry.npmjs.org/pinia/-/pinia-2.3.1.tgz",
- "integrity": "sha512-khUlZSwt9xXCaTbbxFYBKDc/bWAGWJjOgvxETwkTN7KRm66EeT1ZdZj6i2ceh9sP2Pzqsbc704r2yngBrxBVug==",
- "license": "MIT",
- "dependencies": {
- "@vue/devtools-api": "^6.6.3",
- "vue-demi": "^0.14.10"
- },
- "funding": {
- "url": "https://github.com/sponsors/posva"
- },
- "peerDependencies": {
- "typescript": ">=4.4.4",
- "vue": "^2.7.0 || ^3.5.11"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/pirates": {
- "version": "4.0.7",
- "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz",
- "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/pngjs": {
- "version": "5.0.0",
- "resolved": "https://registry.npmmirror.com/pngjs/-/pngjs-5.0.0.tgz",
- "integrity": "sha512-40QW5YalBNfQo5yRYmiw7Yz6TKKVr3h6970B2YE+3fQpsWcrbj1PzJgxeJ19DRQjhMbKPIuMY8rFaXc8moolVw==",
- "license": "MIT",
- "engines": {
- "node": ">=10.13.0"
- }
- },
- "node_modules/polished": {
- "version": "4.3.1",
- "resolved": "https://registry.npmjs.org/polished/-/polished-4.3.1.tgz",
- "integrity": "sha512-OBatVyC/N7SCW/FaDHrSd+vn0o5cS855TOmYi4OkdWUMSJCET/xip//ch8xGUvtr3i44X9LVyWwQlRMTN3pwSA==",
- "license": "MIT",
- "dependencies": {
- "@babel/runtime": "^7.17.8"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/postcss": {
- "version": "8.5.6",
- "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz",
- "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==",
- "funding": [
- {
- "type": "opencollective",
- "url": "https://opencollective.com/postcss/"
- },
- {
- "type": "tidelift",
- "url": "https://tidelift.com/funding/github/npm/postcss"
- },
- {
- "type": "github",
- "url": "https://github.com/sponsors/ai"
- }
- ],
- "license": "MIT",
- "dependencies": {
- "nanoid": "^3.3.11",
- "picocolors": "^1.1.1",
- "source-map-js": "^1.2.1"
- },
- "engines": {
- "node": "^10 || ^12 || >=14"
- }
- },
- "node_modules/postcss-import": {
- "version": "15.1.0",
- "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz",
- "integrity": "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "postcss-value-parser": "^4.0.0",
- "read-cache": "^1.0.0",
- "resolve": "^1.1.7"
- },
- "engines": {
- "node": ">=14.0.0"
- },
- "peerDependencies": {
- "postcss": "^8.0.0"
- }
- },
- "node_modules/postcss-js": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.1.0.tgz",
- "integrity": "sha512-oIAOTqgIo7q2EOwbhb8UalYePMvYoIeRY2YKntdpFQXNosSu3vLrniGgmH9OKs/qAkfoj5oB3le/7mINW1LCfw==",
- "dev": true,
- "funding": [
- {
- "type": "opencollective",
- "url": "https://opencollective.com/postcss/"
- },
- {
- "type": "github",
- "url": "https://github.com/sponsors/ai"
- }
- ],
- "license": "MIT",
- "dependencies": {
- "camelcase-css": "^2.0.1"
- },
- "engines": {
- "node": "^12 || ^14 || >= 16"
- },
- "peerDependencies": {
- "postcss": "^8.4.21"
- }
- },
- "node_modules/postcss-load-config": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-6.0.1.tgz",
- "integrity": "sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==",
- "dev": true,
- "funding": [
- {
- "type": "opencollective",
- "url": "https://opencollective.com/postcss/"
- },
- {
- "type": "github",
- "url": "https://github.com/sponsors/ai"
- }
- ],
- "license": "MIT",
- "dependencies": {
- "lilconfig": "^3.1.1"
- },
- "engines": {
- "node": ">= 18"
- },
- "peerDependencies": {
- "jiti": ">=1.21.0",
- "postcss": ">=8.0.9",
- "tsx": "^4.8.1",
- "yaml": "^2.4.2"
- },
- "peerDependenciesMeta": {
- "jiti": {
- "optional": true
- },
- "postcss": {
- "optional": true
- },
- "tsx": {
- "optional": true
- },
- "yaml": {
- "optional": true
- }
- }
- },
- "node_modules/postcss-nested": {
- "version": "6.2.0",
- "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.2.0.tgz",
- "integrity": "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==",
- "dev": true,
- "funding": [
- {
- "type": "opencollective",
- "url": "https://opencollective.com/postcss/"
- },
- {
- "type": "github",
- "url": "https://github.com/sponsors/ai"
- }
- ],
- "license": "MIT",
- "dependencies": {
- "postcss-selector-parser": "^6.1.1"
- },
- "engines": {
- "node": ">=12.0"
- },
- "peerDependencies": {
- "postcss": "^8.2.14"
- }
- },
- "node_modules/postcss-selector-parser": {
- "version": "6.1.2",
- "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz",
- "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "cssesc": "^3.0.0",
- "util-deprecate": "^1.0.2"
- },
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/postcss-value-parser": {
- "version": "4.2.0",
- "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz",
- "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/prelude-ls": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
- "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 0.8.0"
- }
- },
- "node_modules/proto-list": {
- "version": "1.2.4",
- "resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz",
- "integrity": "sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==",
- "dev": true,
- "license": "ISC"
- },
- "node_modules/proxy-from-env": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
- "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
- "license": "MIT"
- },
- "node_modules/psl": {
- "version": "1.15.0",
- "resolved": "https://registry.npmjs.org/psl/-/psl-1.15.0.tgz",
- "integrity": "sha512-JZd3gMVBAVQkSs6HdNZo9Sdo0LNcQeMNP3CozBJb3JYC/QUYZTnKxP+f8oWRX4rHP5EurWxqAHTSwUCjlNKa1w==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "punycode": "^2.3.1"
- },
- "funding": {
- "url": "https://github.com/sponsors/lupomontero"
- }
- },
- "node_modules/punycode": {
- "version": "2.3.1",
- "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
- "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/qrcode": {
- "version": "1.5.4",
- "resolved": "https://registry.npmmirror.com/qrcode/-/qrcode-1.5.4.tgz",
- "integrity": "sha512-1ca71Zgiu6ORjHqFBDpnSMTR2ReToX4l1Au1VFLyVeBTFavzQnv5JxMFr3ukHVKpSrSA2MCk0lNJSykjUfz7Zg==",
- "license": "MIT",
- "dependencies": {
- "dijkstrajs": "^1.0.1",
- "pngjs": "^5.0.0",
- "yargs": "^15.3.1"
- },
- "bin": {
- "qrcode": "bin/qrcode"
- },
- "engines": {
- "node": ">=10.13.0"
- }
- },
- "node_modules/querystringify": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
- "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/queue-microtask": {
- "version": "1.2.3",
- "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
- "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==",
- "dev": true,
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "license": "MIT"
- },
- "node_modules/react-is": {
- "version": "18.3.1",
- "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz",
- "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==",
- "license": "MIT"
- },
- "node_modules/read-cache": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz",
- "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "pify": "^2.3.0"
- }
- },
- "node_modules/readdirp": {
- "version": "3.6.0",
- "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
- "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "picomatch": "^2.2.1"
- },
- "engines": {
- "node": ">=8.10.0"
- }
- },
- "node_modules/require-directory": {
- "version": "2.1.1",
- "resolved": "https://registry.npmmirror.com/require-directory/-/require-directory-2.1.1.tgz",
- "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
- "license": "MIT",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/require-main-filename": {
- "version": "2.0.0",
- "resolved": "https://registry.npmmirror.com/require-main-filename/-/require-main-filename-2.0.0.tgz",
- "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==",
- "license": "ISC"
- },
- "node_modules/requires-port": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
- "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/resolve": {
- "version": "1.22.11",
- "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz",
- "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==",
- "license": "MIT",
- "dependencies": {
- "is-core-module": "^2.16.1",
- "path-parse": "^1.0.7",
- "supports-preserve-symlinks-flag": "^1.0.0"
- },
- "bin": {
- "resolve": "bin/resolve"
- },
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/resolve-from": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz",
- "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==",
- "license": "MIT",
- "engines": {
- "node": ">=4"
- }
- },
- "node_modules/reusify": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz",
- "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "iojs": ">=1.0.0",
- "node": ">=0.10.0"
- }
- },
- "node_modules/rimraf": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
- "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
- "deprecated": "Rimraf versions prior to v4 are no longer supported",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "glob": "^7.1.3"
- },
- "bin": {
- "rimraf": "bin.js"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/rollup": {
- "version": "4.55.1",
- "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.55.1.tgz",
- "integrity": "sha512-wDv/Ht1BNHB4upNbK74s9usvl7hObDnvVzknxqY/E/O3X6rW1U1rV1aENEfJ54eFZDTNo7zv1f5N4edCluH7+A==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@types/estree": "1.0.8"
- },
- "bin": {
- "rollup": "dist/bin/rollup"
- },
- "engines": {
- "node": ">=18.0.0",
- "npm": ">=8.0.0"
- },
- "optionalDependencies": {
- "@rollup/rollup-android-arm-eabi": "4.55.1",
- "@rollup/rollup-android-arm64": "4.55.1",
- "@rollup/rollup-darwin-arm64": "4.55.1",
- "@rollup/rollup-darwin-x64": "4.55.1",
- "@rollup/rollup-freebsd-arm64": "4.55.1",
- "@rollup/rollup-freebsd-x64": "4.55.1",
- "@rollup/rollup-linux-arm-gnueabihf": "4.55.1",
- "@rollup/rollup-linux-arm-musleabihf": "4.55.1",
- "@rollup/rollup-linux-arm64-gnu": "4.55.1",
- "@rollup/rollup-linux-arm64-musl": "4.55.1",
- "@rollup/rollup-linux-loong64-gnu": "4.55.1",
- "@rollup/rollup-linux-loong64-musl": "4.55.1",
- "@rollup/rollup-linux-ppc64-gnu": "4.55.1",
- "@rollup/rollup-linux-ppc64-musl": "4.55.1",
- "@rollup/rollup-linux-riscv64-gnu": "4.55.1",
- "@rollup/rollup-linux-riscv64-musl": "4.55.1",
- "@rollup/rollup-linux-s390x-gnu": "4.55.1",
- "@rollup/rollup-linux-x64-gnu": "4.55.1",
- "@rollup/rollup-linux-x64-musl": "4.55.1",
- "@rollup/rollup-openbsd-x64": "4.55.1",
- "@rollup/rollup-openharmony-arm64": "4.55.1",
- "@rollup/rollup-win32-arm64-msvc": "4.55.1",
- "@rollup/rollup-win32-ia32-msvc": "4.55.1",
- "@rollup/rollup-win32-x64-gnu": "4.55.1",
- "@rollup/rollup-win32-x64-msvc": "4.55.1",
- "fsevents": "~2.3.2"
- }
- },
- "node_modules/rrweb-cssom": {
- "version": "0.7.1",
- "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.7.1.tgz",
- "integrity": "sha512-TrEMa7JGdVm0UThDJSx7ddw5nVm3UJS9o9CCIZ72B1vSyEZoziDqBYP3XIoi/12lKrJR8rE3jeFHMok2F/Mnsg==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/run-parallel": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
- "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
- "dev": true,
- "funding": [
- {
- "type": "github",
- "url": "https://github.com/sponsors/feross"
- },
- {
- "type": "patreon",
- "url": "https://www.patreon.com/feross"
- },
- {
- "type": "consulting",
- "url": "https://feross.org/support"
- }
- ],
- "license": "MIT",
- "dependencies": {
- "queue-microtask": "^1.2.2"
- }
- },
- "node_modules/safer-buffer": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
- "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/saxes": {
- "version": "6.0.0",
- "resolved": "https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz",
- "integrity": "sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "xmlchars": "^2.2.0"
- },
- "engines": {
- "node": ">=v12.22.7"
- }
- },
- "node_modules/semver": {
- "version": "7.7.3",
- "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz",
- "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==",
- "dev": true,
- "license": "ISC",
- "bin": {
- "semver": "bin/semver.js"
- },
- "engines": {
- "node": ">=10"
- }
- },
- "node_modules/set-blocking": {
- "version": "2.0.0",
- "resolved": "https://registry.npmmirror.com/set-blocking/-/set-blocking-2.0.0.tgz",
- "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==",
- "license": "ISC"
- },
- "node_modules/shebang-command": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
- "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "shebang-regex": "^3.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/shebang-regex": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
- "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/siginfo": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz",
- "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==",
- "dev": true,
- "license": "ISC"
- },
- "node_modules/signal-exit": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz",
- "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==",
- "dev": true,
- "license": "ISC",
- "engines": {
- "node": ">=14"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/slash": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
- "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/source-map": {
- "version": "0.5.7",
- "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz",
- "integrity": "sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==",
- "license": "BSD-3-Clause",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/source-map-js": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
- "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==",
- "license": "BSD-3-Clause",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/ssf": {
- "version": "0.11.2",
- "resolved": "https://registry.npmjs.org/ssf/-/ssf-0.11.2.tgz",
- "integrity": "sha512-+idbmIXoYET47hH+d7dfm2epdOMUDjqcB4648sTZ+t2JwoyBFL/insLfB/racrDmsKB3diwsDA696pZMieAC5g==",
- "license": "Apache-2.0",
- "dependencies": {
- "frac": "~1.1.2"
- },
- "engines": {
- "node": ">=0.8"
- }
- },
- "node_modules/stackback": {
- "version": "0.0.2",
- "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz",
- "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/std-env": {
- "version": "3.10.0",
- "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz",
- "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/string-width": {
- "version": "5.1.2",
- "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz",
- "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "eastasianwidth": "^0.2.0",
- "emoji-regex": "^9.2.2",
- "strip-ansi": "^7.0.1"
- },
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/string-width-cjs": {
- "name": "string-width",
- "version": "4.2.3",
- "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
- "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "emoji-regex": "^8.0.0",
- "is-fullwidth-code-point": "^3.0.0",
- "strip-ansi": "^6.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/string-width-cjs/node_modules/emoji-regex": {
- "version": "8.0.0",
- "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
- "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/string-width/node_modules/ansi-regex": {
- "version": "6.2.2",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
- "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/chalk/ansi-regex?sponsor=1"
- }
- },
- "node_modules/string-width/node_modules/strip-ansi": {
- "version": "7.1.2",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
- "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "ansi-regex": "^6.0.1"
- },
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/chalk/strip-ansi?sponsor=1"
- }
- },
- "node_modules/strip-ansi": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
- "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
- "license": "MIT",
- "dependencies": {
- "ansi-regex": "^5.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/strip-ansi-cjs": {
- "name": "strip-ansi",
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
- "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "ansi-regex": "^5.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/strip-json-comments": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
- "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=8"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/stylis": {
- "version": "4.3.6",
- "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.3.6.tgz",
- "integrity": "sha512-yQ3rwFWRfwNUY7H5vpU0wfdkNSnvnJinhF9830Swlaxl03zsOjCfmX0ugac+3LtK0lYSgwL/KXc8oYL3mG4YFQ==",
- "license": "MIT"
- },
- "node_modules/sucrase": {
- "version": "3.35.1",
- "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.1.tgz",
- "integrity": "sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@jridgewell/gen-mapping": "^0.3.2",
- "commander": "^4.0.0",
- "lines-and-columns": "^1.1.6",
- "mz": "^2.7.0",
- "pirates": "^4.0.1",
- "tinyglobby": "^0.2.11",
- "ts-interface-checker": "^0.1.9"
- },
- "bin": {
- "sucrase": "bin/sucrase",
- "sucrase-node": "bin/sucrase-node"
- },
- "engines": {
- "node": ">=16 || 14 >=14.17"
- }
- },
- "node_modules/supports-color": {
- "version": "7.2.0",
- "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
- "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "has-flag": "^4.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/supports-preserve-symlinks-flag": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz",
- "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==",
- "license": "MIT",
- "engines": {
- "node": ">= 0.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/ljharb"
- }
- },
- "node_modules/symbol-tree": {
- "version": "3.2.4",
- "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz",
- "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/tailwindcss": {
- "version": "3.4.19",
- "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.19.tgz",
- "integrity": "sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@alloc/quick-lru": "^5.2.0",
- "arg": "^5.0.2",
- "chokidar": "^3.6.0",
- "didyoumean": "^1.2.2",
- "dlv": "^1.1.3",
- "fast-glob": "^3.3.2",
- "glob-parent": "^6.0.2",
- "is-glob": "^4.0.3",
- "jiti": "^1.21.7",
- "lilconfig": "^3.1.3",
- "micromatch": "^4.0.8",
- "normalize-path": "^3.0.0",
- "object-hash": "^3.0.0",
- "picocolors": "^1.1.1",
- "postcss": "^8.4.47",
- "postcss-import": "^15.1.0",
- "postcss-js": "^4.0.1",
- "postcss-load-config": "^4.0.2 || ^5.0 || ^6.0",
- "postcss-nested": "^6.2.0",
- "postcss-selector-parser": "^6.1.2",
- "resolve": "^1.22.8",
- "sucrase": "^3.35.0"
- },
- "bin": {
- "tailwind": "lib/cli.js",
- "tailwindcss": "lib/cli.js"
- },
- "engines": {
- "node": ">=14.0.0"
- }
- },
- "node_modules/test-exclude": {
- "version": "7.0.1",
- "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-7.0.1.tgz",
- "integrity": "sha512-pFYqmTw68LXVjeWJMST4+borgQP2AyMNbg1BpZh9LbyhUeNkeaPF9gzfPGUAnSMV3qPYdWUwDIjjCLiSDOl7vg==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "@istanbuljs/schema": "^0.1.2",
- "glob": "^10.4.1",
- "minimatch": "^9.0.4"
- },
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/test-exclude/node_modules/glob": {
- "version": "10.5.0",
- "resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz",
- "integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "foreground-child": "^3.1.0",
- "jackspeak": "^3.1.2",
- "minimatch": "^9.0.4",
- "minipass": "^7.1.2",
- "package-json-from-dist": "^1.0.0",
- "path-scurry": "^1.11.1"
- },
- "bin": {
- "glob": "dist/esm/bin.mjs"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/text-table": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
- "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/thenify": {
- "version": "3.3.1",
- "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz",
- "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "any-promise": "^1.0.0"
- }
- },
- "node_modules/thenify-all": {
- "version": "1.6.0",
- "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz",
- "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "thenify": ">= 3.1.0 < 4"
- },
- "engines": {
- "node": ">=0.8"
- }
- },
- "node_modules/tiny-invariant": {
- "version": "1.3.3",
- "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz",
- "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/tinybench": {
- "version": "2.9.0",
- "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz",
- "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/tinyexec": {
- "version": "0.3.2",
- "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz",
- "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/tinyglobby": {
- "version": "0.2.15",
- "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz",
- "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "fdir": "^6.5.0",
- "picomatch": "^4.0.3"
- },
- "engines": {
- "node": ">=12.0.0"
- },
- "funding": {
- "url": "https://github.com/sponsors/SuperchupuDev"
- }
- },
- "node_modules/tinyglobby/node_modules/picomatch": {
- "version": "4.0.3",
- "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
- "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/sponsors/jonschlinkert"
- }
- },
- "node_modules/tinypool": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.1.1.tgz",
- "integrity": "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": "^18.0.0 || >=20.0.0"
- }
- },
- "node_modules/tinyrainbow": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-1.2.0.tgz",
- "integrity": "sha512-weEDEq7Z5eTHPDh4xjX789+fHfF+P8boiFB+0vbWzpbnbsEr/GRaohi/uMKxg8RZMXnl1ItAi/IUHWMsjDV7kQ==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=14.0.0"
- }
- },
- "node_modules/tinyspy": {
- "version": "3.0.2",
- "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-3.0.2.tgz",
- "integrity": "sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=14.0.0"
- }
- },
- "node_modules/to-regex-range": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
- "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "is-number": "^7.0.0"
- },
- "engines": {
- "node": ">=8.0"
- }
- },
- "node_modules/tough-cookie": {
- "version": "4.1.4",
- "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.4.tgz",
- "integrity": "sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag==",
- "dev": true,
- "license": "BSD-3-Clause",
- "dependencies": {
- "psl": "^1.1.33",
- "punycode": "^2.1.1",
- "universalify": "^0.2.0",
- "url-parse": "^1.5.3"
- },
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/tr46": {
- "version": "5.1.1",
- "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz",
- "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "punycode": "^2.3.1"
- },
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/ts-api-utils": {
- "version": "1.4.3",
- "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.4.3.tgz",
- "integrity": "sha512-i3eMG77UTMD0hZhgRS562pv83RC6ukSAC2GMNWc+9dieh/+jDM5u5YG+NHX6VNDRHQcHwmsTHctP9LhbC3WxVw==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=16"
- },
- "peerDependencies": {
- "typescript": ">=4.2.0"
- }
- },
- "node_modules/ts-interface-checker": {
- "version": "0.1.13",
- "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz",
- "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==",
- "dev": true,
- "license": "Apache-2.0"
- },
- "node_modules/type-check": {
- "version": "0.4.0",
- "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
- "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "prelude-ls": "^1.2.1"
- },
- "engines": {
- "node": ">= 0.8.0"
- }
- },
- "node_modules/type-fest": {
- "version": "0.20.2",
- "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz",
- "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==",
- "dev": true,
- "license": "(MIT OR CC0-1.0)",
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/typescript": {
- "version": "5.6.3",
- "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.3.tgz",
- "integrity": "sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==",
- "dev": true,
- "license": "Apache-2.0",
- "bin": {
- "tsc": "bin/tsc",
- "tsserver": "bin/tsserver"
- },
- "engines": {
- "node": ">=14.17"
- }
- },
- "node_modules/undici-types": {
- "version": "6.21.0",
- "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
- "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/unicorn-magic": {
- "version": "0.3.0",
- "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.3.0.tgz",
- "integrity": "sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=18"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/universalify": {
- "version": "0.2.0",
- "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
- "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 4.0.0"
- }
- },
- "node_modules/update-browserslist-db": {
- "version": "1.2.3",
- "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz",
- "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==",
- "dev": true,
- "funding": [
- {
- "type": "opencollective",
- "url": "https://opencollective.com/browserslist"
- },
- {
- "type": "tidelift",
- "url": "https://tidelift.com/funding/github/npm/browserslist"
- },
- {
- "type": "github",
- "url": "https://github.com/sponsors/ai"
- }
- ],
- "license": "MIT",
- "dependencies": {
- "escalade": "^3.2.0",
- "picocolors": "^1.1.1"
- },
- "bin": {
- "update-browserslist-db": "cli.js"
- },
- "peerDependencies": {
- "browserslist": ">= 4.21.0"
- }
- },
- "node_modules/uri-js": {
- "version": "4.4.1",
- "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
- "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
- "dev": true,
- "license": "BSD-2-Clause",
- "dependencies": {
- "punycode": "^2.1.0"
- }
- },
- "node_modules/url-parse": {
- "version": "1.5.10",
- "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
- "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "querystringify": "^2.1.1",
- "requires-port": "^1.0.0"
- }
- },
- "node_modules/use-merge-value": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/use-merge-value/-/use-merge-value-1.2.0.tgz",
- "integrity": "sha512-DXgG0kkgJN45TcyoXL49vJnn55LehnrmoHc7MbKi+QDBvr8dsesqws8UlyIWGHMR+JXgxc1nvY+jDGMlycsUcw==",
- "license": "MIT",
- "peerDependencies": {
- "react": ">= 16.x"
- }
- },
- "node_modules/util-deprecate": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
- "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/vite": {
- "version": "5.4.21",
- "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz",
- "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "esbuild": "^0.21.3",
- "postcss": "^8.4.43",
- "rollup": "^4.20.0"
- },
- "bin": {
- "vite": "bin/vite.js"
- },
- "engines": {
- "node": "^18.0.0 || >=20.0.0"
- },
- "funding": {
- "url": "https://github.com/vitejs/vite?sponsor=1"
- },
- "optionalDependencies": {
- "fsevents": "~2.3.3"
- },
- "peerDependencies": {
- "@types/node": "^18.0.0 || >=20.0.0",
- "less": "*",
- "lightningcss": "^1.21.0",
- "sass": "*",
- "sass-embedded": "*",
- "stylus": "*",
- "sugarss": "*",
- "terser": "^5.4.0"
- },
- "peerDependenciesMeta": {
- "@types/node": {
- "optional": true
- },
- "less": {
- "optional": true
- },
- "lightningcss": {
- "optional": true
- },
- "sass": {
- "optional": true
- },
- "sass-embedded": {
- "optional": true
- },
- "stylus": {
- "optional": true
- },
- "sugarss": {
- "optional": true
- },
- "terser": {
- "optional": true
- }
- }
- },
- "node_modules/vite-node": {
- "version": "2.1.9",
- "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-2.1.9.tgz",
- "integrity": "sha512-AM9aQ/IPrW/6ENLQg3AGY4K1N2TGZdR5e4gu/MmmR2xR3Ll1+dib+nook92g4TV3PXVyeyxdWwtaCAiUL0hMxA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "cac": "^6.7.14",
- "debug": "^4.3.7",
- "es-module-lexer": "^1.5.4",
- "pathe": "^1.1.2",
- "vite": "^5.0.0"
- },
- "bin": {
- "vite-node": "vite-node.mjs"
- },
- "engines": {
- "node": "^18.0.0 || >=20.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/vitest"
- }
- },
- "node_modules/vite-plugin-checker": {
- "version": "0.9.3",
- "resolved": "https://registry.npmjs.org/vite-plugin-checker/-/vite-plugin-checker-0.9.3.tgz",
- "integrity": "sha512-Tf7QBjeBtG7q11zG0lvoF38/2AVUzzhMNu+Wk+mcsJ00Rk/FpJ4rmUviVJpzWkagbU13cGXvKpt7CMiqtxVTbQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@babel/code-frame": "^7.27.1",
- "chokidar": "^4.0.3",
- "npm-run-path": "^6.0.0",
- "picocolors": "^1.1.1",
- "picomatch": "^4.0.2",
- "strip-ansi": "^7.1.0",
- "tiny-invariant": "^1.3.3",
- "tinyglobby": "^0.2.13",
- "vscode-uri": "^3.1.0"
- },
- "engines": {
- "node": ">=14.16"
- },
- "peerDependencies": {
- "@biomejs/biome": ">=1.7",
- "eslint": ">=7",
- "meow": "^13.2.0",
- "optionator": "^0.9.4",
- "stylelint": ">=16",
- "typescript": "*",
- "vite": ">=2.0.0",
- "vls": "*",
- "vti": "*",
- "vue-tsc": "~2.2.10"
- },
- "peerDependenciesMeta": {
- "@biomejs/biome": {
- "optional": true
- },
- "eslint": {
- "optional": true
- },
- "meow": {
- "optional": true
- },
- "optionator": {
- "optional": true
- },
- "stylelint": {
- "optional": true
- },
- "typescript": {
- "optional": true
- },
- "vls": {
- "optional": true
- },
- "vti": {
- "optional": true
- },
- "vue-tsc": {
- "optional": true
- }
- }
- },
- "node_modules/vite-plugin-checker/node_modules/ansi-regex": {
- "version": "6.2.2",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
- "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/chalk/ansi-regex?sponsor=1"
- }
- },
- "node_modules/vite-plugin-checker/node_modules/chokidar": {
- "version": "4.0.3",
- "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz",
- "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "readdirp": "^4.0.1"
- },
- "engines": {
- "node": ">= 14.16.0"
- },
- "funding": {
- "url": "https://paulmillr.com/funding/"
- }
- },
- "node_modules/vite-plugin-checker/node_modules/picomatch": {
- "version": "4.0.3",
- "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
- "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/sponsors/jonschlinkert"
- }
- },
- "node_modules/vite-plugin-checker/node_modules/readdirp": {
- "version": "4.1.2",
- "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz",
- "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">= 14.18.0"
- },
- "funding": {
- "type": "individual",
- "url": "https://paulmillr.com/funding/"
- }
- },
- "node_modules/vite-plugin-checker/node_modules/strip-ansi": {
- "version": "7.1.2",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
- "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "ansi-regex": "^6.0.1"
- },
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/chalk/strip-ansi?sponsor=1"
- }
- },
- "node_modules/vitest": {
- "version": "2.1.9",
- "resolved": "https://registry.npmjs.org/vitest/-/vitest-2.1.9.tgz",
- "integrity": "sha512-MSmPM9REYqDGBI8439mA4mWhV5sKmDlBKWIYbA3lRb2PTHACE0mgKwA8yQ2xq9vxDTuk4iPrECBAEW2aoFXY0Q==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@vitest/expect": "2.1.9",
- "@vitest/mocker": "2.1.9",
- "@vitest/pretty-format": "^2.1.9",
- "@vitest/runner": "2.1.9",
- "@vitest/snapshot": "2.1.9",
- "@vitest/spy": "2.1.9",
- "@vitest/utils": "2.1.9",
- "chai": "^5.1.2",
- "debug": "^4.3.7",
- "expect-type": "^1.1.0",
- "magic-string": "^0.30.12",
- "pathe": "^1.1.2",
- "std-env": "^3.8.0",
- "tinybench": "^2.9.0",
- "tinyexec": "^0.3.1",
- "tinypool": "^1.0.1",
- "tinyrainbow": "^1.2.0",
- "vite": "^5.0.0",
- "vite-node": "2.1.9",
- "why-is-node-running": "^2.3.0"
- },
- "bin": {
- "vitest": "vitest.mjs"
- },
- "engines": {
- "node": "^18.0.0 || >=20.0.0"
- },
- "funding": {
- "url": "https://opencollective.com/vitest"
- },
- "peerDependencies": {
- "@edge-runtime/vm": "*",
- "@types/node": "^18.0.0 || >=20.0.0",
- "@vitest/browser": "2.1.9",
- "@vitest/ui": "2.1.9",
- "happy-dom": "*",
- "jsdom": "*"
- },
- "peerDependenciesMeta": {
- "@edge-runtime/vm": {
- "optional": true
- },
- "@types/node": {
- "optional": true
- },
- "@vitest/browser": {
- "optional": true
- },
- "@vitest/ui": {
- "optional": true
- },
- "happy-dom": {
- "optional": true
- },
- "jsdom": {
- "optional": true
- }
- }
- },
- "node_modules/vscode-uri": {
- "version": "3.1.0",
- "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.1.0.tgz",
- "integrity": "sha512-/BpdSx+yCQGnCvecbyXdxHDkuk55/G3xwnC0GqY4gmQ3j+A+g8kzzgB4Nk/SINjqn6+waqw3EgbVF2QKExkRxQ==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/vue": {
- "version": "3.5.26",
- "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.26.tgz",
- "integrity": "sha512-SJ/NTccVyAoNUJmkM9KUqPcYlY+u8OVL1X5EW9RIs3ch5H2uERxyyIUI4MRxVCSOiEcupX9xNGde1tL9ZKpimA==",
- "license": "MIT",
- "dependencies": {
- "@vue/compiler-dom": "3.5.26",
- "@vue/compiler-sfc": "3.5.26",
- "@vue/runtime-dom": "3.5.26",
- "@vue/server-renderer": "3.5.26",
- "@vue/shared": "3.5.26"
- },
- "peerDependencies": {
- "typescript": "*"
- },
- "peerDependenciesMeta": {
- "typescript": {
- "optional": true
- }
- }
- },
- "node_modules/vue-chartjs": {
- "version": "5.3.3",
- "resolved": "https://registry.npmjs.org/vue-chartjs/-/vue-chartjs-5.3.3.tgz",
- "integrity": "sha512-jqxtL8KZ6YJ5NTv6XzrzLS7osyegOi28UGNZW0h9OkDL7Sh1396ht4Dorh04aKrl2LiSalQ84WtqiG0RIJb0tA==",
- "license": "MIT",
- "peerDependencies": {
- "chart.js": "^4.1.1",
- "vue": "^3.0.0-0 || ^2.7.0"
- }
- },
- "node_modules/vue-component-type-helpers": {
- "version": "2.2.12",
- "resolved": "https://registry.npmjs.org/vue-component-type-helpers/-/vue-component-type-helpers-2.2.12.tgz",
- "integrity": "sha512-YbGqHZ5/eW4SnkPNR44mKVc6ZKQoRs/Rux1sxC6rdwXb4qpbOSYfDr9DsTHolOTGmIKgM9j141mZbBeg05R1pw==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/vue-demi": {
- "version": "0.14.10",
- "resolved": "https://registry.npmjs.org/vue-demi/-/vue-demi-0.14.10.tgz",
- "integrity": "sha512-nMZBOwuzabUO0nLgIcc6rycZEebF6eeUfaiQx9+WSk8e29IbLvPU9feI6tqW4kTo3hvoYAJkMh8n8D0fuISphg==",
- "hasInstallScript": true,
- "license": "MIT",
- "bin": {
- "vue-demi-fix": "bin/vue-demi-fix.js",
- "vue-demi-switch": "bin/vue-demi-switch.js"
- },
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/sponsors/antfu"
- },
- "peerDependencies": {
- "@vue/composition-api": "^1.0.0-rc.1",
- "vue": "^3.0.0-0 || ^2.6.0"
- },
- "peerDependenciesMeta": {
- "@vue/composition-api": {
- "optional": true
- }
- }
- },
- "node_modules/vue-eslint-parser": {
- "version": "9.4.3",
- "resolved": "https://registry.npmjs.org/vue-eslint-parser/-/vue-eslint-parser-9.4.3.tgz",
- "integrity": "sha512-2rYRLWlIpaiN8xbPiDyXZXRgLGOtWxERV7ND5fFAv5qo1D2N9Fu9MNajBNc6o13lZ+24DAWCkQCvj4klgmcITg==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "debug": "^4.3.4",
- "eslint-scope": "^7.1.1",
- "eslint-visitor-keys": "^3.3.0",
- "espree": "^9.3.1",
- "esquery": "^1.4.0",
- "lodash": "^4.17.21",
- "semver": "^7.3.6"
- },
- "engines": {
- "node": "^14.17.0 || >=16.0.0"
- },
- "funding": {
- "url": "https://github.com/sponsors/mysticatea"
- },
- "peerDependencies": {
- "eslint": ">=6.0.0"
- }
- },
- "node_modules/vue-i18n": {
- "version": "9.14.5",
- "resolved": "https://registry.npmjs.org/vue-i18n/-/vue-i18n-9.14.5.tgz",
- "integrity": "sha512-0jQ9Em3ymWngyiIkj0+c/k7WgaPO+TNzjKSNq9BvBQaKJECqn9cd9fL4tkDhB5G1QBskGl9YxxbDAhgbFtpe2g==",
- "deprecated": "v9 and v10 no longer supported. please migrate to v11. about maintenance status, see https://vue-i18n.intlify.dev/guide/maintenance.html",
- "license": "MIT",
- "dependencies": {
- "@intlify/core-base": "9.14.5",
- "@intlify/shared": "9.14.5",
- "@vue/devtools-api": "^6.5.0"
- },
- "engines": {
- "node": ">= 16"
- },
- "funding": {
- "url": "https://github.com/sponsors/kazupon"
- },
- "peerDependencies": {
- "vue": "^3.0.0"
- }
- },
- "node_modules/vue-router": {
- "version": "4.6.4",
- "resolved": "https://registry.npmjs.org/vue-router/-/vue-router-4.6.4.tgz",
- "integrity": "sha512-Hz9q5sa33Yhduglwz6g9skT8OBPii+4bFn88w6J+J4MfEo4KRRpmiNG/hHHkdbRFlLBOqxN8y8gf2Fb0MTUgVg==",
- "license": "MIT",
- "dependencies": {
- "@vue/devtools-api": "^6.6.4"
- },
- "funding": {
- "url": "https://github.com/sponsors/posva"
- },
- "peerDependencies": {
- "vue": "^3.5.0"
- }
- },
- "node_modules/vue-tsc": {
- "version": "2.2.12",
- "resolved": "https://registry.npmjs.org/vue-tsc/-/vue-tsc-2.2.12.tgz",
- "integrity": "sha512-P7OP77b2h/Pmk+lZdJ0YWs+5tJ6J2+uOQPo7tlBnY44QqQSPYvS0qVT4wqDJgwrZaLe47etJLLQRFia71GYITw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@volar/typescript": "2.4.15",
- "@vue/language-core": "2.2.12"
- },
- "bin": {
- "vue-tsc": "bin/vue-tsc.js"
- },
- "peerDependencies": {
- "typescript": ">=5.0.0"
- }
- },
- "node_modules/w3c-xmlserializer": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-5.0.0.tgz",
- "integrity": "sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "xml-name-validator": "^5.0.0"
- },
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/w3c-xmlserializer/node_modules/xml-name-validator": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-5.0.0.tgz",
- "integrity": "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==",
- "dev": true,
- "license": "Apache-2.0",
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/webidl-conversions": {
- "version": "7.0.0",
- "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz",
- "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==",
- "dev": true,
- "license": "BSD-2-Clause",
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/whatwg-encoding": {
- "version": "3.1.1",
- "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz",
- "integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==",
- "deprecated": "Use @exodus/bytes instead for a more spec-conformant and faster implementation",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "iconv-lite": "0.6.3"
- },
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/whatwg-mimetype": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz",
- "integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/whatwg-url": {
- "version": "14.2.0",
- "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz",
- "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "tr46": "^5.1.0",
- "webidl-conversions": "^7.0.0"
- },
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/which": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
- "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
- "dev": true,
- "license": "ISC",
- "dependencies": {
- "isexe": "^2.0.0"
- },
- "bin": {
- "node-which": "bin/node-which"
- },
- "engines": {
- "node": ">= 8"
- }
- },
- "node_modules/which-module": {
- "version": "2.0.1",
- "resolved": "https://registry.npmmirror.com/which-module/-/which-module-2.0.1.tgz",
- "integrity": "sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==",
- "license": "ISC"
- },
- "node_modules/why-is-node-running": {
- "version": "2.3.0",
- "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz",
- "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "siginfo": "^2.0.0",
- "stackback": "0.0.2"
- },
- "bin": {
- "why-is-node-running": "cli.js"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/wmf": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/wmf/-/wmf-1.0.2.tgz",
- "integrity": "sha512-/p9K7bEh0Dj6WbXg4JG0xvLQmIadrner1bi45VMJTfnbVHsc7yIajZyoSoK60/dtVBs12Fm6WkUI5/3WAVsNMw==",
- "license": "Apache-2.0",
- "engines": {
- "node": ">=0.8"
- }
- },
- "node_modules/word": {
- "version": "0.3.0",
- "resolved": "https://registry.npmjs.org/word/-/word-0.3.0.tgz",
- "integrity": "sha512-OELeY0Q61OXpdUfTp+oweA/vtLVg5VDOXh+3he3PNzLGG/y0oylSOC1xRVj0+l4vQ3tj/bB1HVHv1ocXkQceFA==",
- "license": "Apache-2.0",
- "engines": {
- "node": ">=0.8"
- }
- },
- "node_modules/word-wrap": {
- "version": "1.2.5",
- "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz",
- "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=0.10.0"
- }
- },
- "node_modules/wrap-ansi": {
- "version": "8.1.0",
- "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz",
- "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "ansi-styles": "^6.1.0",
- "string-width": "^5.0.1",
- "strip-ansi": "^7.0.1"
- },
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/chalk/wrap-ansi?sponsor=1"
- }
- },
- "node_modules/wrap-ansi-cjs": {
- "name": "wrap-ansi",
- "version": "7.0.0",
- "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
- "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "ansi-styles": "^4.0.0",
- "string-width": "^4.1.0",
- "strip-ansi": "^6.0.0"
- },
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/chalk/wrap-ansi?sponsor=1"
- }
- },
- "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": {
- "version": "8.0.0",
- "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
- "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/wrap-ansi-cjs/node_modules/string-width": {
- "version": "4.2.3",
- "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
- "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "emoji-regex": "^8.0.0",
- "is-fullwidth-code-point": "^3.0.0",
- "strip-ansi": "^6.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/wrap-ansi/node_modules/ansi-regex": {
- "version": "6.2.2",
- "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
- "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/chalk/ansi-regex?sponsor=1"
- }
- },
- "node_modules/wrap-ansi/node_modules/ansi-styles": {
- "version": "6.2.3",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
- "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/chalk/ansi-styles?sponsor=1"
- }
- },
- "node_modules/wrap-ansi/node_modules/strip-ansi": {
- "version": "7.1.2",
- "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
- "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "ansi-regex": "^6.0.1"
- },
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/chalk/strip-ansi?sponsor=1"
- }
- },
- "node_modules/wrappy": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
- "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
- "dev": true,
- "license": "ISC"
- },
- "node_modules/ws": {
- "version": "8.19.0",
- "resolved": "https://registry.npmjs.org/ws/-/ws-8.19.0.tgz",
- "integrity": "sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=10.0.0"
- },
- "peerDependencies": {
- "bufferutil": "^4.0.1",
- "utf-8-validate": ">=5.0.2"
- },
- "peerDependenciesMeta": {
- "bufferutil": {
- "optional": true
- },
- "utf-8-validate": {
- "optional": true
- }
- }
- },
- "node_modules/xlsx": {
- "version": "0.18.5",
- "resolved": "https://registry.npmjs.org/xlsx/-/xlsx-0.18.5.tgz",
- "integrity": "sha512-dmg3LCjBPHZnQp5/F/+nnTa+miPJxUXB6vtk42YjBBKayDNagxGEeIdWApkYPOf3Z3pm3k62Knjzp7lMeTEtFQ==",
- "license": "Apache-2.0",
- "dependencies": {
- "adler-32": "~1.3.0",
- "cfb": "~1.2.1",
- "codepage": "~1.15.0",
- "crc-32": "~1.2.1",
- "ssf": "~0.11.2",
- "wmf": "~1.0.1",
- "word": "~0.3.0"
- },
- "bin": {
- "xlsx": "bin/xlsx.njs"
- },
- "engines": {
- "node": ">=0.8"
- }
- },
- "node_modules/xml-name-validator": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-4.0.0.tgz",
- "integrity": "sha512-ICP2e+jsHvAj2E2lIHxa5tjXRlKDJo4IdvPvCXbXQGdzSfmSpNVyIKMvoZHjDY9DP0zV17iI85o90vRFXNccRw==",
- "dev": true,
- "license": "Apache-2.0",
- "engines": {
- "node": ">=12"
- }
- },
- "node_modules/xmlchars": {
- "version": "2.2.0",
- "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz",
- "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==",
- "dev": true,
- "license": "MIT"
- },
- "node_modules/y18n": {
- "version": "4.0.3",
- "resolved": "https://registry.npmmirror.com/y18n/-/y18n-4.0.3.tgz",
- "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==",
- "license": "ISC"
- },
- "node_modules/yaml": {
- "version": "1.10.2",
- "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz",
- "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==",
- "license": "ISC",
- "engines": {
- "node": ">= 6"
- }
- },
- "node_modules/yargs": {
- "version": "15.4.1",
- "resolved": "https://registry.npmmirror.com/yargs/-/yargs-15.4.1.tgz",
- "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==",
- "license": "MIT",
- "dependencies": {
- "cliui": "^6.0.0",
- "decamelize": "^1.2.0",
- "find-up": "^4.1.0",
- "get-caller-file": "^2.0.1",
- "require-directory": "^2.1.1",
- "require-main-filename": "^2.0.0",
- "set-blocking": "^2.0.0",
- "string-width": "^4.2.0",
- "which-module": "^2.0.0",
- "y18n": "^4.0.0",
- "yargs-parser": "^18.1.2"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/yargs-parser": {
- "version": "18.1.3",
- "resolved": "https://registry.npmmirror.com/yargs-parser/-/yargs-parser-18.1.3.tgz",
- "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==",
- "license": "ISC",
- "dependencies": {
- "camelcase": "^5.0.0",
- "decamelize": "^1.2.0"
- },
- "engines": {
- "node": ">=6"
- }
- },
- "node_modules/yargs/node_modules/emoji-regex": {
- "version": "8.0.0",
- "resolved": "https://registry.npmmirror.com/emoji-regex/-/emoji-regex-8.0.0.tgz",
- "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
- "license": "MIT"
- },
- "node_modules/yargs/node_modules/find-up": {
- "version": "4.1.0",
- "resolved": "https://registry.npmmirror.com/find-up/-/find-up-4.1.0.tgz",
- "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
- "license": "MIT",
- "dependencies": {
- "locate-path": "^5.0.0",
- "path-exists": "^4.0.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/yargs/node_modules/locate-path": {
- "version": "5.0.0",
- "resolved": "https://registry.npmmirror.com/locate-path/-/locate-path-5.0.0.tgz",
- "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
- "license": "MIT",
- "dependencies": {
- "p-locate": "^4.1.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/yargs/node_modules/p-limit": {
- "version": "2.3.0",
- "resolved": "https://registry.npmmirror.com/p-limit/-/p-limit-2.3.0.tgz",
- "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
- "license": "MIT",
- "dependencies": {
- "p-try": "^2.0.0"
- },
- "engines": {
- "node": ">=6"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/yargs/node_modules/p-locate": {
- "version": "4.1.0",
- "resolved": "https://registry.npmmirror.com/p-locate/-/p-locate-4.1.0.tgz",
- "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
- "license": "MIT",
- "dependencies": {
- "p-limit": "^2.2.0"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/yargs/node_modules/string-width": {
- "version": "4.2.3",
- "resolved": "https://registry.npmmirror.com/string-width/-/string-width-4.2.3.tgz",
- "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
- "license": "MIT",
- "dependencies": {
- "emoji-regex": "^8.0.0",
- "is-fullwidth-code-point": "^3.0.0",
- "strip-ansi": "^6.0.1"
- },
- "engines": {
- "node": ">=8"
- }
- },
- "node_modules/yocto-queue": {
- "version": "0.1.0",
- "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
- "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=10"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- }
- }
-}
diff --git a/frontend/package.json b/frontend/package.json
index 8e1fdb4b..38b92708 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -19,8 +19,10 @@
"@vueuse/core": "^10.7.0",
"axios": "^1.6.2",
"chart.js": "^4.4.1",
+ "dompurify": "^3.3.1",
"driver.js": "^1.4.0",
"file-saver": "^2.0.5",
+ "marked": "^17.0.1",
"pinia": "^2.1.7",
"qrcode": "^1.5.4",
"vue": "^3.4.0",
@@ -30,6 +32,7 @@
"xlsx": "^0.18.5"
},
"devDependencies": {
+ "@types/dompurify": "^3.0.5",
"@types/file-saver": "^2.0.7",
"@types/mdx": "^2.0.13",
"@types/node": "^20.10.5",
diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml
index df82dcdb..7dc73325 100644
--- a/frontend/pnpm-lock.yaml
+++ b/frontend/pnpm-lock.yaml
@@ -20,12 +20,18 @@ importers:
chart.js:
specifier: ^4.4.1
version: 4.5.1
+ dompurify:
+ specifier: ^3.3.1
+ version: 3.3.1
driver.js:
specifier: ^1.4.0
version: 1.4.0
file-saver:
specifier: ^2.0.5
version: 2.0.5
+ marked:
+ specifier: ^17.0.1
+ version: 17.0.1
pinia:
specifier: ^2.1.7
version: 2.3.1(typescript@5.6.3)(vue@3.5.26(typescript@5.6.3))
@@ -48,6 +54,9 @@ importers:
specifier: ^0.18.5
version: 0.18.5
devDependencies:
+ '@types/dompurify':
+ specifier: ^3.0.5
+ version: 3.2.0
'@types/file-saver':
specifier: ^2.0.7
version: 2.0.7
@@ -1460,6 +1469,10 @@ packages:
'@types/debug@4.1.12':
resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==}
+ '@types/dompurify@3.2.0':
+ resolution: {integrity: sha512-Fgg31wv9QbLDA0SpTOXO3MaxySc4DKGLi8sna4/Utjo4r3ZRPdCt4UQee8BWr+Q5z21yifghREPJGYaEOEIACg==}
+ deprecated: This is a stub types definition. dompurify provides its own type definitions, so you do not need this installed.
+
'@types/estree-jsx@1.0.5':
resolution: {integrity: sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==}
@@ -5901,6 +5914,10 @@ snapshots:
dependencies:
'@types/ms': 2.1.0
+ '@types/dompurify@3.2.0':
+ dependencies:
+ dompurify: 3.3.1
+
'@types/estree-jsx@1.0.5':
dependencies:
'@types/estree': 1.0.8
diff --git a/frontend/src/components/admin/announcements/AnnouncementTargetingEditor.vue b/frontend/src/components/admin/announcements/AnnouncementTargetingEditor.vue
index bd90af42..355f5399 100644
--- a/frontend/src/components/admin/announcements/AnnouncementTargetingEditor.vue
+++ b/frontend/src/components/admin/announcements/AnnouncementTargetingEditor.vue
@@ -323,6 +323,7 @@ function ensureSelectionPath(groupIndex: number, condIndex: number) {
if (!subscriptionSelections[groupIndex][condIndex]) subscriptionSelections[groupIndex][condIndex] = []
}
+// Sync from modelValue to subscriptionSelections (one-way: model -> local state)
watch(
() => props.modelValue,
(v) => {
@@ -333,20 +334,34 @@ watch(
const c = allOf[ci]
if (c?.type === 'subscription') {
ensureSelectionPath(gi, ci)
- subscriptionSelections[gi][ci] = (c.group_ids ?? []).slice()
+ // Only update if different to avoid triggering unnecessary updates
+ const newIds = (c.group_ids ?? []).slice()
+ const currentIds = subscriptionSelections[gi]?.[ci] ?? []
+ if (JSON.stringify(newIds.sort()) !== JSON.stringify(currentIds.sort())) {
+ subscriptionSelections[gi][ci] = newIds
+ }
}
}
}
},
- { immediate: true, deep: true }
+ { immediate: true }
)
+// Sync from subscriptionSelections to modelValue (one-way: local state -> model)
+// Use a debounced approach to avoid infinite loops
+let syncTimeout: ReturnType | null = null
watch(
() => subscriptionSelections,
() => {
- // sync back to targeting
- updateTargeting((draft) => {
- const groups = draft.any_of ?? []
+ // Debounce the sync to avoid rapid fire updates
+ if (syncTimeout) clearTimeout(syncTimeout)
+
+ syncTimeout = setTimeout(() => {
+ // Build the new targeting state
+ const newTargeting: TargetingDraft = JSON.parse(JSON.stringify(props.modelValue ?? { any_of: [] }))
+ if (!newTargeting.any_of) newTargeting.any_of = []
+
+ const groups = newTargeting.any_of ?? []
for (let gi = 0; gi < groups.length; gi++) {
const allOf = groups[gi]?.all_of ?? []
for (let ci = 0; ci < allOf.length; ci++) {
@@ -358,7 +373,12 @@ watch(
}
}
}
- })
+
+ // Only emit if there's an actual change (deep comparison)
+ if (JSON.stringify(props.modelValue) !== JSON.stringify(newTargeting)) {
+ emit('update:modelValue', newTargeting)
+ }
+ }, 0)
},
{ deep: true }
)
diff --git a/frontend/src/components/common/AnnouncementBell.vue b/frontend/src/components/common/AnnouncementBell.vue
new file mode 100644
index 00000000..9d00f9be
--- /dev/null
+++ b/frontend/src/components/common/AnnouncementBell.vue
@@ -0,0 +1,626 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {{ t('announcements.title') }}
+
+
+
+ {{ unreadCount }}
+ {{ t('announcements.unread') }}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {{ item.title }}
+
+
+
+
+
+
+
+
+ {{ t('announcements.unread') }}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
{{ t('announcements.empty') }}
+
{{ t('announcements.emptyDescription') }}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {{ t('announcements.title') }}
+
+
+
+
+
+
+ {{ t('announcements.unread') }}
+
+
+
+
+
+
+ {{ selectedAnnouncement.title }}
+
+
+
+
+
+
+
+
+
+
+
{{ selectedAnnouncement.read_at ? t('announcements.read') : t('announcements.unread') }}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
{{ selectedAnnouncement.read_at ? t('announcements.readStatus') : t('announcements.markReadHint') }}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/frontend/src/components/icons/Icon.vue b/frontend/src/components/icons/Icon.vue
index c8ab8aed..1f055111 100644
--- a/frontend/src/components/icons/Icon.vue
+++ b/frontend/src/components/icons/Icon.vue
@@ -107,6 +107,9 @@ const icons = {
database: 'M20.25 6.375c0 2.278-3.694 4.125-8.25 4.125S3.75 8.653 3.75 6.375m16.5 0c0-2.278-3.694-4.125-8.25-4.125S3.75 4.097 3.75 6.375m16.5 0v11.25c0 2.278-3.694 4.125-8.25 4.125s-8.25-1.847-8.25-4.125V6.375m16.5 0v3.75m-16.5-3.75v3.75m16.5 0v3.75C20.25 16.153 16.556 18 12 18s-8.25-1.847-8.25-4.125v-3.75m16.5 0c0 2.278-3.694 4.125-8.25 4.125s-8.25-1.847-8.25-4.125',
cube: 'M20 7l-8-4-8 4m16 0l-8 4m8-4v10l-8 4m0-10L4 7m8 4v10M4 7v10l8 4',
+ // Notification
+ bell: 'M15 17h5l-1.405-1.405A2.032 2.032 0 0118 14.158V11a6.002 6.002 0 00-4-5.659V5a2 2 0 10-4 0v.341C7.67 6.165 6 8.388 6 11v3.159c0 .538-.214 1.055-.595 1.436L4 17h5m6 0v1a3 3 0 11-6 0v-1m6 0H9',
+
// Misc
bolt: 'M13 10V3L4 14h7v7l9-11h-7z',
sparkles: 'M9.813 15.904L9 18.75l-.813-2.846a4.5 4.5 0 00-3.09-3.09L2.25 12l2.846-.813a4.5 4.5 0 003.09-3.09L9 5.25l.813 2.846a4.5 4.5 0 003.09 3.09L15.75 12l-2.846.813a4.5 4.5 0 00-3.09 3.09zM18.259 8.715L18 9.75l-.259-1.035a3.375 3.375 0 00-2.455-2.456L14.25 6l1.036-.259a3.375 3.375 0 002.455-2.456L18 2.25l.259 1.035a3.375 3.375 0 002.456 2.456L21.75 6l-1.035.259a3.375 3.375 0 00-2.456 2.456z',
diff --git a/frontend/src/components/layout/AppHeader.vue b/frontend/src/components/layout/AppHeader.vue
index 9d2b40fb..6b5849c0 100644
--- a/frontend/src/components/layout/AppHeader.vue
+++ b/frontend/src/components/layout/AppHeader.vue
@@ -21,8 +21,11 @@
-
+
+
+
+
{
const items = [
{ path: '/dashboard', label: t('nav.dashboard'), icon: DashboardIcon },
- { path: '/announcements', label: t('nav.announcements'), icon: BellIcon },
{ path: '/keys', label: t('nav.apiKeys'), icon: KeyIcon },
{ path: '/usage', label: t('nav.usage'), icon: ChartIcon, hideInSimpleMode: true },
{ path: '/subscriptions', label: t('nav.mySubscriptions'), icon: CreditCardIcon, hideInSimpleMode: true },
@@ -456,7 +455,6 @@ const userNavItems = computed(() => {
// Personal navigation items (for admin's "My Account" section, without Dashboard)
const personalNavItems = computed(() => {
const items = [
- { path: '/announcements', label: t('nav.announcements'), icon: BellIcon },
{ path: '/keys', label: t('nav.apiKeys'), icon: KeyIcon },
{ path: '/usage', label: t('nav.usage'), icon: ChartIcon, hideInSimpleMode: true },
{ path: '/subscriptions', label: t('nav.mySubscriptions'), icon: CreditCardIcon, hideInSimpleMode: true },
diff --git a/frontend/src/i18n/locales/en.ts b/frontend/src/i18n/locales/en.ts
index 61f487a8..bb7defd8 100644
--- a/frontend/src/i18n/locales/en.ts
+++ b/frontend/src/i18n/locales/en.ts
@@ -3139,13 +3139,22 @@ export default {
description: 'View system announcements',
unreadOnly: 'Show unread only',
markRead: 'Mark as read',
+ markAllRead: 'Mark all as read',
+ viewAll: 'View all announcements',
+ markedAsRead: 'Marked as read',
+ allMarkedAsRead: 'All announcements marked as read',
+ newCount: '{count} new announcement | {count} new announcements',
readAt: 'Read at',
read: 'Read',
unread: 'Unread',
startsAt: 'Starts at',
endsAt: 'Ends at',
empty: 'No announcements',
- emptyUnread: 'No unread announcements'
+ emptyUnread: 'No unread announcements',
+ total: 'announcements',
+ emptyDescription: 'There are no system announcements at this time',
+ readStatus: 'You have read this announcement',
+ markReadHint: 'Click "Mark as read" to mark this announcement'
},
// User Subscriptions Page
diff --git a/frontend/src/i18n/locales/zh.ts b/frontend/src/i18n/locales/zh.ts
index bf7806b5..2e6230b2 100644
--- a/frontend/src/i18n/locales/zh.ts
+++ b/frontend/src/i18n/locales/zh.ts
@@ -3288,13 +3288,22 @@ export default {
description: '查看系统公告',
unreadOnly: '仅显示未读',
markRead: '标记已读',
+ markAllRead: '全部已读',
+ viewAll: '查看全部公告',
+ markedAsRead: '已标记为已读',
+ allMarkedAsRead: '所有公告已标记为已读',
+ newCount: '有 {count} 条新公告',
readAt: '已读时间',
read: '已读',
unread: '未读',
startsAt: '开始时间',
endsAt: '结束时间',
empty: '暂无公告',
- emptyUnread: '暂无未读公告'
+ emptyUnread: '暂无未读公告',
+ total: '条公告',
+ emptyDescription: '暂时没有任何系统公告',
+ readStatus: '您已阅读此公告',
+ markReadHint: '点击"已读"标记此公告'
},
// User Subscriptions Page
diff --git a/frontend/src/router/index.ts b/frontend/src/router/index.ts
index 4e2c1147..4bb46cee 100644
--- a/frontend/src/router/index.ts
+++ b/frontend/src/router/index.ts
@@ -187,18 +187,6 @@ const routes: RouteRecordRaw[] = [
descriptionKey: 'purchase.description'
}
},
- {
- path: '/announcements',
- name: 'Announcements',
- component: () => import('@/views/user/AnnouncementsView.vue'),
- meta: {
- requiresAuth: true,
- requiresAdmin: false,
- title: 'Announcements',
- titleKey: 'announcements.title',
- descriptionKey: 'announcements.description'
- }
- },
// ==================== Admin Routes ====================
{
diff --git a/frontend/src/utils/format.ts b/frontend/src/utils/format.ts
index 78e45354..e6535c2e 100644
--- a/frontend/src/utils/format.ts
+++ b/frontend/src/utils/format.ts
@@ -261,3 +261,22 @@ export function formatCountdownWithSuffix(targetDate: string | Date | null | und
if (!countdown) return null
return i18n.global.t('common.time.countdown.withSuffix', { time: countdown })
}
+
+/**
+ * 格式化为相对时间 + 具体时间组合
+ * @param date 日期字符串或 Date 对象
+ * @returns 组合时间字符串,如 "5 天前 · 2026-01-27 15:25"
+ */
+export function formatRelativeWithDateTime(date: string | Date | null | undefined): string {
+ if (!date) return ''
+
+ const relativeTime = formatRelativeTime(date)
+ const dateTime = formatDateTime(date)
+
+ // 如果是 "从未" 或空字符串,只返回相对时间
+ if (!dateTime || relativeTime === i18n.global.t('common.time.never')) {
+ return relativeTime
+ }
+
+ return `${relativeTime} · ${dateTime}`
+}
diff --git a/frontend/src/views/user/AnnouncementsView.vue b/frontend/src/views/user/AnnouncementsView.vue
deleted file mode 100644
index 99ea253e..00000000
--- a/frontend/src/views/user/AnnouncementsView.vue
+++ /dev/null
@@ -1,140 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- {{ unreadOnly ? t('announcements.emptyUnread') : t('announcements.empty') }}
-
-
-
-
-
-
-
-
- {{ item.title }}
-
-
- {{ t('announcements.unread') }}
-
-
- {{ t('announcements.read') }}
-
-
-
- {{ formatDateTime(item.created_at) }}
-
- {{ t('announcements.startsAt') }}: {{ formatDateTime(item.starts_at) }}
-
-
- {{ t('announcements.endsAt') }}: {{ formatDateTime(item.ends_at) }}
-
-
-
-
-
-
-
- {{ t('announcements.readAt') }}: {{ formatDateTime(item.read_at) }}
-
-
-
-
-
- {{ item.content }}
-
-
-
-
-
-
-
-
-
From 5cda979209715e52dc02d459e9c7ded53a1a96ca Mon Sep 17 00:00:00 2001
From: shaw
Date: Mon, 2 Feb 2026 16:17:07 +0800
Subject: [PATCH 70/99] =?UTF-8?q?feat(deploy):=20=E4=BC=98=E5=8C=96=20Dock?=
=?UTF-8?q?er=20=E9=83=A8=E7=BD=B2=E4=BD=93=E9=AA=8C=EF=BC=8C=E6=96=B0?=
=?UTF-8?q?=E5=A2=9E=E4=B8=80=E9=94=AE=E9=83=A8=E7=BD=B2=E8=84=9A=E6=9C=AC?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
## 新增功能
- 新增 docker-compose.local.yml:使用本地目录存储数据,便于迁移和备份
- 新增 docker-deploy.sh:一键部署脚本,自动生成安全密钥(JWT_SECRET、TOTP_ENCRYPTION_KEY、POSTGRES_PASSWORD)
- 新增 deploy/.gitignore:忽略运行时数据目录
## 优化改进
- docker-compose.local.yml 包含 PGDATA 环境变量修复,解决 PostgreSQL 18 Alpine 数据丢失问题
- 脚本自动设置 .env 文件权限为 600,增强安全性
- 脚本显示生成的凭证,方便用户记录
## 文档更新
- 更新 README.md(英文版):新增"快速开始"章节,添加部署版本对比表
- 更新 README_CN.md(中文版):同步英文版更新
- 更新 deploy/README.md:详细说明两种部署方式和迁移方法
## 使用方式
一键部署:
```bash
curl -sSL https://raw.githubusercontent.com/Wei-Shaw/sub2api/main/deploy/docker-deploy.sh | bash
docker-compose -f docker-compose.local.yml up -d
```
轻松迁移:
```bash
tar czf sub2api-complete.tar.gz deploy/
# 传输到新服务器后直接解压启动即可
```
---
README.md | 128 +++++++++++++-----
README_CN.md | 128 +++++++++++++-----
deploy/.gitignore | 19 +++
deploy/README.md | 147 ++++++++++++++++++++-
deploy/docker-compose.local.yml | 222 ++++++++++++++++++++++++++++++++
deploy/docker-deploy.sh | 171 ++++++++++++++++++++++++
6 files changed, 750 insertions(+), 65 deletions(-)
create mode 100644 deploy/.gitignore
create mode 100644 deploy/docker-compose.local.yml
create mode 100644 deploy/docker-deploy.sh
diff --git a/README.md b/README.md
index e8e9c5a5..14656332 100644
--- a/README.md
+++ b/README.md
@@ -128,7 +128,7 @@ curl -sSL https://raw.githubusercontent.com/Wei-Shaw/sub2api/main/deploy/install
---
-### Method 2: Docker Compose
+### Method 2: Docker Compose (Recommended)
Deploy with Docker Compose, including PostgreSQL and Redis containers.
@@ -137,87 +137,157 @@ Deploy with Docker Compose, including PostgreSQL and Redis containers.
- Docker 20.10+
- Docker Compose v2+
-#### Installation Steps
+#### Quick Start (One-Click Deployment)
+
+Use the automated deployment script for easy setup:
+
+```bash
+# Create deployment directory
+mkdir -p sub2api-deploy && cd sub2api-deploy
+
+# Download and run deployment preparation script
+curl -sSL https://raw.githubusercontent.com/Wei-Shaw/sub2api/main/deploy/docker-deploy.sh | bash
+
+# Start services
+docker-compose -f docker-compose.local.yml up -d
+
+# View logs
+docker-compose -f docker-compose.local.yml logs -f sub2api
+```
+
+**What the script does:**
+- Downloads `docker-compose.local.yml` and `.env.example`
+- Generates secure credentials (JWT_SECRET, TOTP_ENCRYPTION_KEY, POSTGRES_PASSWORD)
+- Creates `.env` file with auto-generated secrets
+- Creates data directories (uses local directories for easy backup/migration)
+- Displays generated credentials for your reference
+
+#### Manual Deployment
+
+If you prefer manual setup:
```bash
# 1. Clone the repository
git clone https://github.com/Wei-Shaw/sub2api.git
-cd sub2api
+cd sub2api/deploy
-# 2. Enter the deploy directory
-cd deploy
-
-# 3. Copy environment configuration
+# 2. Copy environment configuration
cp .env.example .env
-# 4. Edit configuration (set your passwords)
+# 3. Edit configuration (generate secure passwords)
nano .env
```
**Required configuration in `.env`:**
```bash
-# PostgreSQL password (REQUIRED - change this!)
+# PostgreSQL password (REQUIRED)
POSTGRES_PASSWORD=your_secure_password_here
+# JWT Secret (RECOMMENDED - keeps users logged in after restart)
+JWT_SECRET=your_jwt_secret_here
+
+# TOTP Encryption Key (RECOMMENDED - preserves 2FA after restart)
+TOTP_ENCRYPTION_KEY=your_totp_key_here
+
# Optional: Admin account
ADMIN_EMAIL=admin@example.com
ADMIN_PASSWORD=your_admin_password
# Optional: Custom port
SERVER_PORT=8080
+```
-# Optional: Security configuration
-# Enable URL allowlist validation (false to skip allowlist checks, only basic format validation)
-SECURITY_URL_ALLOWLIST_ENABLED=false
+**Generate secure secrets:**
+```bash
+# Generate JWT_SECRET
+openssl rand -hex 32
-# Allow insecure HTTP URLs when allowlist is disabled (default: false, requires https)
-# ⚠️ WARNING: Enabling this allows HTTP (plaintext) URLs which can expose API keys
-# Only recommended for:
-# - Development/testing environments
-# - Internal networks with trusted endpoints
-# - When using local test servers (http://localhost)
-# PRODUCTION: Keep this false or use HTTPS URLs only
-SECURITY_URL_ALLOWLIST_ALLOW_INSECURE_HTTP=false
+# Generate TOTP_ENCRYPTION_KEY
+openssl rand -hex 32
-# Allow private IP addresses for upstream/pricing/CRS (for internal deployments)
-SECURITY_URL_ALLOWLIST_ALLOW_PRIVATE_HOSTS=false
+# Generate POSTGRES_PASSWORD
+openssl rand -hex 32
```
```bash
+# 4. Create data directories (for local version)
+mkdir -p data postgres_data redis_data
+
# 5. Start all services
+# Option A: Local directory version (recommended - easy migration)
+docker-compose -f docker-compose.local.yml up -d
+
+# Option B: Named volumes version (simple setup)
docker-compose up -d
# 6. Check status
-docker-compose ps
+docker-compose -f docker-compose.local.yml ps
# 7. View logs
-docker-compose logs -f sub2api
+docker-compose -f docker-compose.local.yml logs -f sub2api
```
+#### Deployment Versions
+
+| Version | Data Storage | Migration | Best For |
+|---------|-------------|-----------|----------|
+| **docker-compose.local.yml** | Local directories | ✅ Easy (tar entire directory) | Production, frequent backups |
+| **docker-compose.yml** | Named volumes | ⚠️ Requires docker commands | Simple setup |
+
+**Recommendation:** Use `docker-compose.local.yml` (deployed by script) for easier data management.
+
#### Access
Open `http://YOUR_SERVER_IP:8080` in your browser.
+If admin password was auto-generated, find it in logs:
+```bash
+docker-compose -f docker-compose.local.yml logs sub2api | grep "admin password"
+```
+
#### Upgrade
```bash
# Pull latest image and recreate container
-docker-compose pull
-docker-compose up -d
+docker-compose -f docker-compose.local.yml pull
+docker-compose -f docker-compose.local.yml up -d
+```
+
+#### Easy Migration (Local Directory Version)
+
+When using `docker-compose.local.yml`, migrate to a new server easily:
+
+```bash
+# On source server
+docker-compose -f docker-compose.local.yml down
+cd ..
+tar czf sub2api-complete.tar.gz sub2api-deploy/
+
+# Transfer to new server
+scp sub2api-complete.tar.gz user@new-server:/path/
+
+# On new server
+tar xzf sub2api-complete.tar.gz
+cd sub2api-deploy/
+docker-compose -f docker-compose.local.yml up -d
```
#### Useful Commands
```bash
# Stop all services
-docker-compose down
+docker-compose -f docker-compose.local.yml down
# Restart
-docker-compose restart
+docker-compose -f docker-compose.local.yml restart
# View all logs
-docker-compose logs -f
+docker-compose -f docker-compose.local.yml logs -f
+
+# Remove all data (caution!)
+docker-compose -f docker-compose.local.yml down
+rm -rf data/ postgres_data/ redis_data/
```
---
diff --git a/README_CN.md b/README_CN.md
index 41d399d5..e609f25d 100644
--- a/README_CN.md
+++ b/README_CN.md
@@ -135,7 +135,7 @@ curl -sSL https://raw.githubusercontent.com/Wei-Shaw/sub2api/main/deploy/install
---
-### 方式二:Docker Compose
+### 方式二:Docker Compose(推荐)
使用 Docker Compose 部署,包含 PostgreSQL 和 Redis 容器。
@@ -144,87 +144,157 @@ curl -sSL https://raw.githubusercontent.com/Wei-Shaw/sub2api/main/deploy/install
- Docker 20.10+
- Docker Compose v2+
-#### 安装步骤
+#### 快速开始(一键部署)
+
+使用自动化部署脚本快速搭建:
+
+```bash
+# 创建部署目录
+mkdir -p sub2api-deploy && cd sub2api-deploy
+
+# 下载并运行部署准备脚本
+curl -sSL https://raw.githubusercontent.com/Wei-Shaw/sub2api/main/deploy/docker-deploy.sh | bash
+
+# 启动服务
+docker-compose -f docker-compose.local.yml up -d
+
+# 查看日志
+docker-compose -f docker-compose.local.yml logs -f sub2api
+```
+
+**脚本功能:**
+- 下载 `docker-compose.local.yml` 和 `.env.example`
+- 自动生成安全凭证(JWT_SECRET、TOTP_ENCRYPTION_KEY、POSTGRES_PASSWORD)
+- 创建 `.env` 文件并填充自动生成的密钥
+- 创建数据目录(使用本地目录,便于备份和迁移)
+- 显示生成的凭证供你记录
+
+#### 手动部署
+
+如果你希望手动配置:
```bash
# 1. 克隆仓库
git clone https://github.com/Wei-Shaw/sub2api.git
-cd sub2api
+cd sub2api/deploy
-# 2. 进入 deploy 目录
-cd deploy
-
-# 3. 复制环境配置文件
+# 2. 复制环境配置文件
cp .env.example .env
-# 4. 编辑配置(设置密码等)
+# 3. 编辑配置(生成安全密码)
nano .env
```
**`.env` 必须配置项:**
```bash
-# PostgreSQL 密码(必须修改!)
+# PostgreSQL 密码(必需)
POSTGRES_PASSWORD=your_secure_password_here
+# JWT 密钥(推荐 - 重启后保持用户登录状态)
+JWT_SECRET=your_jwt_secret_here
+
+# TOTP 加密密钥(推荐 - 重启后保留双因素认证)
+TOTP_ENCRYPTION_KEY=your_totp_key_here
+
# 可选:管理员账号
ADMIN_EMAIL=admin@example.com
ADMIN_PASSWORD=your_admin_password
# 可选:自定义端口
SERVER_PORT=8080
+```
-# 可选:安全配置
-# 启用 URL 白名单验证(false 则跳过白名单检查,仅做基本格式校验)
-SECURITY_URL_ALLOWLIST_ENABLED=false
+**生成安全密钥:**
+```bash
+# 生成 JWT_SECRET
+openssl rand -hex 32
-# 关闭白名单时,是否允许 http:// URL(默认 false,只允许 https://)
-# ⚠️ 警告:允许 HTTP 会暴露 API 密钥(明文传输)
-# 仅建议在以下场景使用:
-# - 开发/测试环境
-# - 内部可信网络
-# - 本地测试服务器(http://localhost)
-# 生产环境:保持 false 或仅使用 HTTPS URL
-SECURITY_URL_ALLOWLIST_ALLOW_INSECURE_HTTP=false
+# 生成 TOTP_ENCRYPTION_KEY
+openssl rand -hex 32
-# 是否允许私有 IP 地址用于上游/定价/CRS(内网部署时使用)
-SECURITY_URL_ALLOWLIST_ALLOW_PRIVATE_HOSTS=false
+# 生成 POSTGRES_PASSWORD
+openssl rand -hex 32
```
```bash
+# 4. 创建数据目录(本地版)
+mkdir -p data postgres_data redis_data
+
# 5. 启动所有服务
+# 选项 A:本地目录版(推荐 - 易于迁移)
+docker-compose -f docker-compose.local.yml up -d
+
+# 选项 B:命名卷版(简单设置)
docker-compose up -d
# 6. 查看状态
-docker-compose ps
+docker-compose -f docker-compose.local.yml ps
# 7. 查看日志
-docker-compose logs -f sub2api
+docker-compose -f docker-compose.local.yml logs -f sub2api
```
+#### 部署版本对比
+
+| 版本 | 数据存储 | 迁移便利性 | 适用场景 |
+|------|---------|-----------|---------|
+| **docker-compose.local.yml** | 本地目录 | ✅ 简单(打包整个目录) | 生产环境、频繁备份 |
+| **docker-compose.yml** | 命名卷 | ⚠️ 需要 docker 命令 | 简单设置 |
+
+**推荐:** 使用 `docker-compose.local.yml`(脚本部署)以便更轻松地管理数据。
+
#### 访问
在浏览器中打开 `http://你的服务器IP:8080`
+如果管理员密码是自动生成的,在日志中查找:
+```bash
+docker-compose -f docker-compose.local.yml logs sub2api | grep "admin password"
+```
+
#### 升级
```bash
# 拉取最新镜像并重建容器
-docker-compose pull
-docker-compose up -d
+docker-compose -f docker-compose.local.yml pull
+docker-compose -f docker-compose.local.yml up -d
+```
+
+#### 轻松迁移(本地目录版)
+
+使用 `docker-compose.local.yml` 时,可以轻松迁移到新服务器:
+
+```bash
+# 源服务器
+docker-compose -f docker-compose.local.yml down
+cd ..
+tar czf sub2api-complete.tar.gz sub2api-deploy/
+
+# 传输到新服务器
+scp sub2api-complete.tar.gz user@new-server:/path/
+
+# 新服务器
+tar xzf sub2api-complete.tar.gz
+cd sub2api-deploy/
+docker-compose -f docker-compose.local.yml up -d
```
#### 常用命令
```bash
# 停止所有服务
-docker-compose down
+docker-compose -f docker-compose.local.yml down
# 重启
-docker-compose restart
+docker-compose -f docker-compose.local.yml restart
# 查看所有日志
-docker-compose logs -f
+docker-compose -f docker-compose.local.yml logs -f
+
+# 删除所有数据(谨慎!)
+docker-compose -f docker-compose.local.yml down
+rm -rf data/ postgres_data/ redis_data/
```
---
diff --git a/deploy/.gitignore b/deploy/.gitignore
new file mode 100644
index 00000000..29a15135
--- /dev/null
+++ b/deploy/.gitignore
@@ -0,0 +1,19 @@
+# =============================================================================
+# Sub2API Deploy Directory - Git Ignore
+# =============================================================================
+
+# Data directories (generated at runtime when using docker-compose.local.yml)
+data/
+postgres_data/
+redis_data/
+
+# Environment configuration (contains sensitive information)
+.env
+
+# Backup files
+*.backup
+*.bak
+
+# Temporary files
+*.tmp
+*.log
diff --git a/deploy/README.md b/deploy/README.md
index ed4ea721..091d8ad7 100644
--- a/deploy/README.md
+++ b/deploy/README.md
@@ -13,7 +13,9 @@ This directory contains files for deploying Sub2API on Linux servers.
| File | Description |
|------|-------------|
-| `docker-compose.yml` | Docker Compose configuration |
+| `docker-compose.yml` | Docker Compose configuration (named volumes) |
+| `docker-compose.local.yml` | Docker Compose configuration (local directories, easy migration) |
+| `docker-deploy.sh` | **One-click Docker deployment script (recommended)** |
| `.env.example` | Docker environment variables template |
| `DOCKER.md` | Docker Hub documentation |
| `install.sh` | One-click binary installation script |
@@ -24,7 +26,45 @@ This directory contains files for deploying Sub2API on Linux servers.
## Docker Deployment (Recommended)
-### Quick Start
+### Method 1: One-Click Deployment (Recommended)
+
+Use the automated preparation script for the easiest setup:
+
+```bash
+# Download and run the preparation script
+curl -sSL https://raw.githubusercontent.com/Wei-Shaw/sub2api/main/deploy/docker-deploy.sh | bash
+
+# Or download first, then run
+curl -sSL https://raw.githubusercontent.com/Wei-Shaw/sub2api/main/deploy/docker-deploy.sh -o docker-deploy.sh
+chmod +x docker-deploy.sh
+./docker-deploy.sh
+```
+
+**What the script does:**
+- Downloads `docker-compose.local.yml` and `.env.example`
+- Automatically generates secure secrets (JWT_SECRET, TOTP_ENCRYPTION_KEY, POSTGRES_PASSWORD)
+- Creates `.env` file with generated secrets
+- Creates necessary data directories (data/, postgres_data/, redis_data/)
+- **Displays generated credentials** (POSTGRES_PASSWORD, JWT_SECRET, etc.)
+
+**After running the script:**
+```bash
+# Start services
+docker-compose -f docker-compose.local.yml up -d
+
+# View logs
+docker-compose -f docker-compose.local.yml logs -f sub2api
+
+# If admin password was auto-generated, find it in logs:
+docker-compose -f docker-compose.local.yml logs sub2api | grep "admin password"
+
+# Access Web UI
+# http://localhost:8080
+```
+
+### Method 2: Manual Deployment
+
+If you prefer manual control:
```bash
# Clone repository
@@ -33,18 +73,36 @@ cd sub2api/deploy
# Configure environment
cp .env.example .env
-nano .env # Set POSTGRES_PASSWORD (required)
+nano .env # Set POSTGRES_PASSWORD and other required variables
-# Start all services
-docker-compose up -d
+# Generate secure secrets (recommended)
+JWT_SECRET=$(openssl rand -hex 32)
+TOTP_ENCRYPTION_KEY=$(openssl rand -hex 32)
+echo "JWT_SECRET=${JWT_SECRET}" >> .env
+echo "TOTP_ENCRYPTION_KEY=${TOTP_ENCRYPTION_KEY}" >> .env
+
+# Create data directories
+mkdir -p data postgres_data redis_data
+
+# Start all services using local directory version
+docker-compose -f docker-compose.local.yml up -d
# View logs (check for auto-generated admin password)
-docker-compose logs -f sub2api
+docker-compose -f docker-compose.local.yml logs -f sub2api
# Access Web UI
# http://localhost:8080
```
+### Deployment Version Comparison
+
+| Version | Data Storage | Migration | Best For |
+|---------|-------------|-----------|----------|
+| **docker-compose.local.yml** | Local directories (./data, ./postgres_data, ./redis_data) | ✅ Easy (tar entire directory) | Production, need frequent backups/migration |
+| **docker-compose.yml** | Named volumes (/var/lib/docker/volumes/) | ⚠️ Requires docker commands | Simple setup, don't need migration |
+
+**Recommendation:** Use `docker-compose.local.yml` (deployed by `docker-deploy.sh`) for easier data management and migration.
+
### How Auto-Setup Works
When using Docker Compose with `AUTO_SETUP=true`:
@@ -89,6 +147,32 @@ SELECT
### Commands
+For **local directory version** (docker-compose.local.yml):
+
+```bash
+# Start services
+docker-compose -f docker-compose.local.yml up -d
+
+# Stop services
+docker-compose -f docker-compose.local.yml down
+
+# View logs
+docker-compose -f docker-compose.local.yml logs -f sub2api
+
+# Restart Sub2API only
+docker-compose -f docker-compose.local.yml restart sub2api
+
+# Update to latest version
+docker-compose -f docker-compose.local.yml pull
+docker-compose -f docker-compose.local.yml up -d
+
+# Remove all data (caution!)
+docker-compose -f docker-compose.local.yml down
+rm -rf data/ postgres_data/ redis_data/
+```
+
+For **named volumes version** (docker-compose.yml):
+
```bash
# Start services
docker-compose up -d
@@ -115,10 +199,11 @@ docker-compose down -v
| Variable | Required | Default | Description |
|----------|----------|---------|-------------|
| `POSTGRES_PASSWORD` | **Yes** | - | PostgreSQL password |
+| `JWT_SECRET` | **Recommended** | *(auto-generated)* | JWT secret (fixed for persistent sessions) |
+| `TOTP_ENCRYPTION_KEY` | **Recommended** | *(auto-generated)* | TOTP encryption key (fixed for persistent 2FA) |
| `SERVER_PORT` | No | `8080` | Server port |
| `ADMIN_EMAIL` | No | `admin@sub2api.local` | Admin email |
| `ADMIN_PASSWORD` | No | *(auto-generated)* | Admin password |
-| `JWT_SECRET` | No | *(auto-generated)* | JWT secret |
| `TZ` | No | `Asia/Shanghai` | Timezone |
| `GEMINI_OAUTH_CLIENT_ID` | No | *(builtin)* | Google OAuth client ID (Gemini OAuth). Leave empty to use the built-in Gemini CLI client. |
| `GEMINI_OAUTH_CLIENT_SECRET` | No | *(builtin)* | Google OAuth client secret (Gemini OAuth). Leave empty to use the built-in Gemini CLI client. |
@@ -127,6 +212,30 @@ docker-compose down -v
See `.env.example` for all available options.
+> **Note:** The `docker-deploy.sh` script automatically generates `JWT_SECRET`, `TOTP_ENCRYPTION_KEY`, and `POSTGRES_PASSWORD` for you.
+
+### Easy Migration (Local Directory Version)
+
+When using `docker-compose.local.yml`, all data is stored in local directories, making migration simple:
+
+```bash
+# On source server: Stop services and create archive
+cd /path/to/deployment
+docker-compose -f docker-compose.local.yml down
+cd ..
+tar czf sub2api-complete.tar.gz deployment/
+
+# Transfer to new server
+scp sub2api-complete.tar.gz user@new-server:/path/to/destination/
+
+# On new server: Extract and start
+tar xzf sub2api-complete.tar.gz
+cd deployment/
+docker-compose -f docker-compose.local.yml up -d
+```
+
+Your entire deployment (configuration + data) is migrated!
+
---
## Gemini OAuth Configuration
@@ -359,6 +468,30 @@ The main config file is at `/etc/sub2api/config.yaml` (created by Setup Wizard).
### Docker
+For **local directory version**:
+
+```bash
+# Check container status
+docker-compose -f docker-compose.local.yml ps
+
+# View detailed logs
+docker-compose -f docker-compose.local.yml logs --tail=100 sub2api
+
+# Check database connection
+docker-compose -f docker-compose.local.yml exec postgres pg_isready
+
+# Check Redis connection
+docker-compose -f docker-compose.local.yml exec redis redis-cli ping
+
+# Restart all services
+docker-compose -f docker-compose.local.yml restart
+
+# Check data directories
+ls -la data/ postgres_data/ redis_data/
+```
+
+For **named volumes version**:
+
```bash
# Check container status
docker-compose ps
diff --git a/deploy/docker-compose.local.yml b/deploy/docker-compose.local.yml
new file mode 100644
index 00000000..05ce129a
--- /dev/null
+++ b/deploy/docker-compose.local.yml
@@ -0,0 +1,222 @@
+# =============================================================================
+# Sub2API Docker Compose - Local Directory Version
+# =============================================================================
+# This configuration uses local directories for data storage instead of named
+# volumes, making it easy to migrate the entire deployment by simply copying
+# the deploy directory.
+#
+# Quick Start:
+# 1. Copy .env.example to .env and configure
+# 2. mkdir -p data postgres_data redis_data
+# 3. docker-compose -f docker-compose.local.yml up -d
+# 4. Check logs: docker-compose -f docker-compose.local.yml logs -f sub2api
+# 5. Access: http://localhost:8080
+#
+# Migration to New Server:
+# 1. docker-compose -f docker-compose.local.yml down
+# 2. tar czf sub2api-deploy.tar.gz deploy/
+# 3. Transfer to new server and extract
+# 4. docker-compose -f docker-compose.local.yml up -d
+# =============================================================================
+
+services:
+ # ===========================================================================
+ # Sub2API Application
+ # ===========================================================================
+ sub2api:
+ image: weishaw/sub2api:latest
+ container_name: sub2api
+ restart: unless-stopped
+ ulimits:
+ nofile:
+ soft: 100000
+ hard: 100000
+ ports:
+ - "${BIND_HOST:-0.0.0.0}:${SERVER_PORT:-8080}:8080"
+ volumes:
+ # Local directory mapping for easy migration
+ - ./data:/app/data
+ # Optional: Mount custom config.yaml (uncomment and create the file first)
+ # Copy config.example.yaml to config.yaml, modify it, then uncomment:
+ # - ./config.yaml:/app/data/config.yaml:ro
+ environment:
+ # =======================================================================
+ # Auto Setup (REQUIRED for Docker deployment)
+ # =======================================================================
+ - AUTO_SETUP=true
+
+ # =======================================================================
+ # Server Configuration
+ # =======================================================================
+ - SERVER_HOST=0.0.0.0
+ - SERVER_PORT=8080
+ - SERVER_MODE=${SERVER_MODE:-release}
+ - RUN_MODE=${RUN_MODE:-standard}
+
+ # =======================================================================
+ # Database Configuration (PostgreSQL)
+ # =======================================================================
+ - DATABASE_HOST=postgres
+ - DATABASE_PORT=5432
+ - DATABASE_USER=${POSTGRES_USER:-sub2api}
+ - DATABASE_PASSWORD=${POSTGRES_PASSWORD:?POSTGRES_PASSWORD is required}
+ - DATABASE_DBNAME=${POSTGRES_DB:-sub2api}
+ - DATABASE_SSLMODE=disable
+
+ # =======================================================================
+ # Redis Configuration
+ # =======================================================================
+ - REDIS_HOST=redis
+ - REDIS_PORT=6379
+ - REDIS_PASSWORD=${REDIS_PASSWORD:-}
+ - REDIS_DB=${REDIS_DB:-0}
+ - REDIS_ENABLE_TLS=${REDIS_ENABLE_TLS:-false}
+
+ # =======================================================================
+ # Admin Account (auto-created on first run)
+ # =======================================================================
+ - ADMIN_EMAIL=${ADMIN_EMAIL:-admin@sub2api.local}
+ - ADMIN_PASSWORD=${ADMIN_PASSWORD:-}
+
+ # =======================================================================
+ # JWT Configuration
+ # =======================================================================
+ # IMPORTANT: Set a fixed JWT_SECRET to prevent login sessions from being
+ # invalidated after container restarts. If left empty, a random secret
+ # will be generated on each startup.
+ # Generate a secure secret: openssl rand -hex 32
+ - JWT_SECRET=${JWT_SECRET:-}
+ - JWT_EXPIRE_HOUR=${JWT_EXPIRE_HOUR:-24}
+
+ # =======================================================================
+ # TOTP (2FA) Configuration
+ # =======================================================================
+ # IMPORTANT: Set a fixed encryption key for TOTP secrets. If left empty,
+ # a random key will be generated on each startup, causing all existing
+ # TOTP configurations to become invalid (users won't be able to login
+ # with 2FA).
+ # Generate a secure key: openssl rand -hex 32
+ - TOTP_ENCRYPTION_KEY=${TOTP_ENCRYPTION_KEY:-}
+
+ # =======================================================================
+ # Timezone Configuration
+ # This affects ALL time operations in the application:
+ # - Database timestamps
+ # - Usage statistics "today" boundary
+ # - Subscription expiry times
+ # - Log timestamps
+ # Common values: Asia/Shanghai, America/New_York, Europe/London, UTC
+ # =======================================================================
+ - TZ=${TZ:-Asia/Shanghai}
+
+ # =======================================================================
+ # Gemini OAuth Configuration (for Gemini accounts)
+ # =======================================================================
+ - GEMINI_OAUTH_CLIENT_ID=${GEMINI_OAUTH_CLIENT_ID:-}
+ - GEMINI_OAUTH_CLIENT_SECRET=${GEMINI_OAUTH_CLIENT_SECRET:-}
+ - GEMINI_OAUTH_SCOPES=${GEMINI_OAUTH_SCOPES:-}
+ - GEMINI_QUOTA_POLICY=${GEMINI_QUOTA_POLICY:-}
+
+ # =======================================================================
+ # Security Configuration (URL Allowlist)
+ # =======================================================================
+ # Enable URL allowlist validation (false to skip allowlist checks)
+ - SECURITY_URL_ALLOWLIST_ENABLED=${SECURITY_URL_ALLOWLIST_ENABLED:-false}
+ # Allow insecure HTTP URLs when allowlist is disabled (default: false, requires https)
+ - SECURITY_URL_ALLOWLIST_ALLOW_INSECURE_HTTP=${SECURITY_URL_ALLOWLIST_ALLOW_INSECURE_HTTP:-false}
+ # Allow private IP addresses for upstream/pricing/CRS (for internal deployments)
+ - SECURITY_URL_ALLOWLIST_ALLOW_PRIVATE_HOSTS=${SECURITY_URL_ALLOWLIST_ALLOW_PRIVATE_HOSTS:-false}
+ # Upstream hosts whitelist (comma-separated, only used when enabled=true)
+ - SECURITY_URL_ALLOWLIST_UPSTREAM_HOSTS=${SECURITY_URL_ALLOWLIST_UPSTREAM_HOSTS:-}
+
+ # =======================================================================
+  # Update Configuration (online update settings)
+ # =======================================================================
+ # Proxy for accessing GitHub (online updates + pricing data)
+ # Examples: http://host:port, socks5://host:port
+ - UPDATE_PROXY_URL=${UPDATE_PROXY_URL:-}
+ depends_on:
+ postgres:
+ condition: service_healthy
+ redis:
+ condition: service_healthy
+ networks:
+ - sub2api-network
+ healthcheck:
+ test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
+ interval: 30s
+ timeout: 10s
+ retries: 3
+ start_period: 30s
+
+ # ===========================================================================
+ # PostgreSQL Database
+ # ===========================================================================
+ postgres:
+ image: postgres:18-alpine
+ container_name: sub2api-postgres
+ restart: unless-stopped
+ ulimits:
+ nofile:
+ soft: 100000
+ hard: 100000
+ volumes:
+ # Local directory mapping for easy migration
+ - ./postgres_data:/var/lib/postgresql/data
+ environment:
+ - POSTGRES_USER=${POSTGRES_USER:-sub2api}
+ - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:?POSTGRES_PASSWORD is required}
+ - POSTGRES_DB=${POSTGRES_DB:-sub2api}
+ - PGDATA=/var/lib/postgresql/data
+ - TZ=${TZ:-Asia/Shanghai}
+ networks:
+ - sub2api-network
+ healthcheck:
+ test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-sub2api} -d ${POSTGRES_DB:-sub2api}"]
+ interval: 10s
+ timeout: 5s
+ retries: 5
+ start_period: 10s
+    # Note: the port is intentionally not exposed to the host; the app connects over the internal network.
+    # For debugging, temporarily add: ports: ["127.0.0.1:5433:5432"]
+
+ # ===========================================================================
+ # Redis Cache
+ # ===========================================================================
+ redis:
+ image: redis:8-alpine
+ container_name: sub2api-redis
+ restart: unless-stopped
+ ulimits:
+ nofile:
+ soft: 100000
+ hard: 100000
+ volumes:
+ # Local directory mapping for easy migration
+ - ./redis_data:/data
+ command: >
+ sh -c '
+ redis-server
+ --save 60 1
+ --appendonly yes
+ --appendfsync everysec
+ ${REDIS_PASSWORD:+--requirepass "$REDIS_PASSWORD"}'
+ environment:
+ - TZ=${TZ:-Asia/Shanghai}
+ # REDISCLI_AUTH is used by redis-cli for authentication (safer than -a flag)
+ - REDISCLI_AUTH=${REDIS_PASSWORD:-}
+ networks:
+ - sub2api-network
+ healthcheck:
+ test: ["CMD", "redis-cli", "ping"]
+ interval: 10s
+ timeout: 5s
+ retries: 5
+ start_period: 5s
+
+# =============================================================================
+# Networks
+# =============================================================================
+networks:
+ sub2api-network:
+ driver: bridge
diff --git a/deploy/docker-deploy.sh b/deploy/docker-deploy.sh
new file mode 100644
index 00000000..1e4ce81f
--- /dev/null
+++ b/deploy/docker-deploy.sh
@@ -0,0 +1,171 @@
+#!/bin/bash
+# =============================================================================
+# Sub2API Docker Deployment Preparation Script
+# =============================================================================
+# This script prepares deployment files for Sub2API:
+# - Downloads docker-compose.local.yml and .env.example
+# - Generates secure secrets (JWT_SECRET, TOTP_ENCRYPTION_KEY, POSTGRES_PASSWORD)
+# - Creates necessary data directories
+#
+# After running this script, you can start services with:
+# docker-compose -f docker-compose.local.yml up -d
+# =============================================================================
+
+set -e
+
+# Colors for output
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+BLUE='\033[0;34m'
+NC='\033[0m' # No Color
+
+# GitHub raw content base URL
+GITHUB_RAW_URL="https://raw.githubusercontent.com/Wei-Shaw/sub2api/main/deploy"
+
+# Print colored message
+print_info() {
+ echo -e "${BLUE}[INFO]${NC} $1"
+}
+
+print_success() {
+ echo -e "${GREEN}[SUCCESS]${NC} $1"
+}
+
+print_warning() {
+ echo -e "${YELLOW}[WARNING]${NC} $1"
+}
+
+print_error() {
+ echo -e "${RED}[ERROR]${NC} $1"
+}
+
+# Generate random secret
+generate_secret() {
+ openssl rand -hex 32
+}
+
+# Check if command exists
+command_exists() {
+ command -v "$1" >/dev/null 2>&1
+}
+
+# Main installation function
+main() {
+ echo ""
+ echo "=========================================="
+ echo " Sub2API Deployment Preparation"
+ echo "=========================================="
+ echo ""
+
+ # Check if openssl is available
+ if ! command_exists openssl; then
+ print_error "openssl is not installed. Please install openssl first."
+ exit 1
+ fi
+
+ # Check if deployment already exists
+ if [ -f "docker-compose.local.yml" ] && [ -f ".env" ]; then
+ print_warning "Deployment files already exist in current directory."
+ read -p "Overwrite existing files? (y/N): " -r
+ echo
+ if [[ ! $REPLY =~ ^[Yy]$ ]]; then
+ print_info "Cancelled."
+ exit 0
+ fi
+ fi
+
+ # Download docker-compose.local.yml
+ print_info "Downloading docker-compose.local.yml..."
+ if command_exists curl; then
+ curl -sSL "${GITHUB_RAW_URL}/docker-compose.local.yml" -o docker-compose.local.yml
+ elif command_exists wget; then
+ wget -q "${GITHUB_RAW_URL}/docker-compose.local.yml" -O docker-compose.local.yml
+ else
+ print_error "Neither curl nor wget is installed. Please install one of them."
+ exit 1
+ fi
+ print_success "Downloaded docker-compose.local.yml"
+
+ # Download .env.example
+ print_info "Downloading .env.example..."
+ if command_exists curl; then
+ curl -sSL "${GITHUB_RAW_URL}/.env.example" -o .env.example
+ else
+ wget -q "${GITHUB_RAW_URL}/.env.example" -O .env.example
+ fi
+ print_success "Downloaded .env.example"
+
+ # Generate .env file with auto-generated secrets
+ print_info "Generating secure secrets..."
+ echo ""
+
+ # Generate secrets
+ JWT_SECRET=$(generate_secret)
+ TOTP_ENCRYPTION_KEY=$(generate_secret)
+ POSTGRES_PASSWORD=$(generate_secret)
+
+ # Create .env from .env.example
+ cp .env.example .env
+
+ # Update .env with generated secrets (cross-platform compatible)
+ if sed --version >/dev/null 2>&1; then
+ # GNU sed (Linux)
+ sed -i "s/^JWT_SECRET=.*/JWT_SECRET=${JWT_SECRET}/" .env
+ sed -i "s/^TOTP_ENCRYPTION_KEY=.*/TOTP_ENCRYPTION_KEY=${TOTP_ENCRYPTION_KEY}/" .env
+ sed -i "s/^POSTGRES_PASSWORD=.*/POSTGRES_PASSWORD=${POSTGRES_PASSWORD}/" .env
+ else
+ # BSD sed (macOS)
+ sed -i '' "s/^JWT_SECRET=.*/JWT_SECRET=${JWT_SECRET}/" .env
+ sed -i '' "s/^TOTP_ENCRYPTION_KEY=.*/TOTP_ENCRYPTION_KEY=${TOTP_ENCRYPTION_KEY}/" .env
+ sed -i '' "s/^POSTGRES_PASSWORD=.*/POSTGRES_PASSWORD=${POSTGRES_PASSWORD}/" .env
+ fi
+
+ # Create data directories
+ print_info "Creating data directories..."
+ mkdir -p data postgres_data redis_data
+ print_success "Created data directories"
+
+ # Set secure permissions for .env file (readable/writable only by owner)
+ chmod 600 .env
+ echo ""
+
+ # Display completion message
+ echo "=========================================="
+ echo " Preparation Complete!"
+ echo "=========================================="
+ echo ""
+ echo "Generated secure credentials:"
+ echo " POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}"
+ echo " JWT_SECRET: ${JWT_SECRET}"
+ echo " TOTP_ENCRYPTION_KEY: ${TOTP_ENCRYPTION_KEY}"
+ echo ""
+ print_warning "These credentials have been saved to .env file."
+ print_warning "Please keep them secure and do not share publicly!"
+ echo ""
+ echo "Directory structure:"
+ echo " docker-compose.local.yml - Docker Compose configuration"
+ echo " .env - Environment variables (generated secrets)"
+ echo " .env.example - Example template (for reference)"
+ echo " data/ - Application data (will be created on first run)"
+ echo " postgres_data/ - PostgreSQL data"
+ echo " redis_data/ - Redis data"
+ echo ""
+ echo "Next steps:"
+ echo " 1. (Optional) Edit .env to customize configuration"
+ echo " 2. Start services:"
+ echo " docker-compose -f docker-compose.local.yml up -d"
+ echo ""
+ echo " 3. View logs:"
+ echo " docker-compose -f docker-compose.local.yml logs -f sub2api"
+ echo ""
+ echo " 4. Access Web UI:"
+ echo " http://localhost:8080"
+ echo ""
+ print_info "If admin password is not set in .env, it will be auto-generated."
+ print_info "Check logs for the generated admin password on first startup."
+ echo ""
+}
+
+# Run main function
+main "$@"
From 426ce616c0628a9208825a8acbc1b3651f368157 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E5=B0=8F=E5=8C=97?=
Date: Mon, 2 Feb 2026 17:41:27 +0800
Subject: [PATCH 71/99] =?UTF-8?q?feat:=20=E6=94=AF=E6=8C=81=E5=9C=A8?=
=?UTF-8?q?=E7=94=A8=E6=88=B7=E6=90=9C=E7=B4=A2=E4=B8=AD=E4=BD=BF=E7=94=A8?=
=?UTF-8?q?=E5=A4=87=E6=B3=A8=E5=AD=97=E6=AE=B5?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
- 在用户仓库的搜索过滤器中添加备注字段
- 管理员现在可以通过备注/标记搜索用户
- 使用不区分大小写的搜索(ContainsFold)
Changes:
- backend/internal/repository/user_repo.go: 添加 NotesContainsFold 到搜索条件
---
backend/internal/repository/user_repo.go | 1 +
1 file changed, 1 insertion(+)
diff --git a/backend/internal/repository/user_repo.go b/backend/internal/repository/user_repo.go
index fe5b645c..654bd16b 100644
--- a/backend/internal/repository/user_repo.go
+++ b/backend/internal/repository/user_repo.go
@@ -190,6 +190,7 @@ func (r *userRepository) ListWithFilters(ctx context.Context, params pagination.
dbuser.Or(
dbuser.EmailContainsFold(filters.Search),
dbuser.UsernameContainsFold(filters.Search),
+ dbuser.NotesContainsFold(filters.Search),
),
)
}
From ae18397ca62132ea77c68e6a0ca8fc5ebc86e784 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E5=B0=8F=E5=8C=97?=
Date: Mon, 2 Feb 2026 17:44:50 +0800
Subject: [PATCH 72/99] =?UTF-8?q?feat:=20=E5=90=91=E7=94=A8=E6=88=B7?=
=?UTF-8?q?=E6=98=BE=E7=A4=BA=E7=AE=A1=E7=90=86=E5=91=98=E8=B0=83=E6=95=B4?=
=?UTF-8?q?=E4=BD=99=E9=A2=9D=E7=9A=84=E5=A4=87=E6=B3=A8?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
- 为RedeemCode DTO添加notes字段(仅用于admin_balance/admin_concurrency类型)
- 更新mapper使其有条件地包含备注信息
- 在用户兑换历史UI中显示备注
- 备注以斜体显示,悬停时显示完整内容
用户现在可以看到管理员调整其余额的原因说明。
Changes:
- backend/internal/handler/dto/types.go: RedeemCode添加notes字段
- backend/internal/handler/dto/mappers.go: 条件性填充notes
- frontend/src/api/redeem.ts: TypeScript接口添加notes
- frontend/src/views/user/RedeemView.vue: UI显示备注信息
---
backend/internal/handler/dto/mappers.go | 10 +++++++++-
backend/internal/handler/dto/types.go | 4 ++++
frontend/src/api/redeem.ts | 4 +++-
frontend/src/views/user/RedeemView.vue | 8 ++++++++
4 files changed, 24 insertions(+), 2 deletions(-)
diff --git a/backend/internal/handler/dto/mappers.go b/backend/internal/handler/dto/mappers.go
index d58a8a29..886a5535 100644
--- a/backend/internal/handler/dto/mappers.go
+++ b/backend/internal/handler/dto/mappers.go
@@ -321,7 +321,7 @@ func RedeemCodeFromServiceAdmin(rc *service.RedeemCode) *AdminRedeemCode {
}
func redeemCodeFromServiceBase(rc *service.RedeemCode) RedeemCode {
- return RedeemCode{
+ out := RedeemCode{
ID: rc.ID,
Code: rc.Code,
Type: rc.Type,
@@ -335,6 +335,14 @@ func redeemCodeFromServiceBase(rc *service.RedeemCode) RedeemCode {
User: UserFromServiceShallow(rc.User),
Group: GroupFromServiceShallow(rc.Group),
}
+
+ // For admin_balance/admin_concurrency types, include notes so users can see
+ // why they were charged or credited by admin
+ if (rc.Type == "admin_balance" || rc.Type == "admin_concurrency") && rc.Notes != "" {
+ out.Notes = &rc.Notes
+ }
+
+ return out
}
// AccountSummaryFromService returns a minimal AccountSummary for usage log display.
diff --git a/backend/internal/handler/dto/types.go b/backend/internal/handler/dto/types.go
index 938d707c..4cfaef5f 100644
--- a/backend/internal/handler/dto/types.go
+++ b/backend/internal/handler/dto/types.go
@@ -198,6 +198,10 @@ type RedeemCode struct {
GroupID *int64 `json:"group_id"`
ValidityDays int `json:"validity_days"`
+ // Notes is only populated for admin_balance/admin_concurrency types
+ // so users can see why they were charged or credited
+ Notes *string `json:"notes,omitempty"`
+
User *User `json:"user,omitempty"`
Group *Group `json:"group,omitempty"`
}
diff --git a/frontend/src/api/redeem.ts b/frontend/src/api/redeem.ts
index 9e1c7d94..22abf4d8 100644
--- a/frontend/src/api/redeem.ts
+++ b/frontend/src/api/redeem.ts
@@ -14,7 +14,9 @@ export interface RedeemHistoryItem {
status: string
used_at: string
created_at: string
- // 订阅类型专用字段
+ // Notes from admin for admin_balance/admin_concurrency types
+ notes?: string
+ // Subscription-specific fields
group_id?: number
validity_days?: number
group?: {
diff --git a/frontend/src/views/user/RedeemView.vue b/frontend/src/views/user/RedeemView.vue
index 96158596..5850c084 100644
--- a/frontend/src/views/user/RedeemView.vue
+++ b/frontend/src/views/user/RedeemView.vue
@@ -312,6 +312,14 @@
{{ t('redeem.adminAdjustment') }}
+
+
+ {{ item.notes }}
+
From c441638fc01ca9aeffb60133a2d459d53429ecf5 Mon Sep 17 00:00:00 2001
From: JIA-ss <627723154@qq.com>
Date: Mon, 2 Feb 2026 18:30:06 +0800
Subject: [PATCH 73/99] =?UTF-8?q?feat(gateway):=20=E5=A2=9E=E5=BC=BA=20/v1?=
=?UTF-8?q?/usage=20=E7=AB=AF=E7=82=B9=E8=BF=94=E5=9B=9E=E5=AE=8C=E6=95=B4?=
=?UTF-8?q?=E7=94=A8=E9=87=8F=E7=BB=9F=E8=AE=A1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
为 CC Switch 集成增强 /v1/usage 网关端点,在保持原有 4 字段
(isValid, planName, remaining, unit) 向后兼容的基础上,新增:
- usage 对象:今日/累计的请求数、token 用量、费用,以及 RPM/TPM
- subscription 对象(订阅模式):日/周/月用量和限额、过期时间
- balance 字段(余额模式):当前钱包余额
用量数据获取采用 best-effort 策略,失败不影响基础响应。
Co-Authored-By: Claude Opus 4.5
---
backend/cmd/server/wire_gen.go | 2 +-
backend/internal/handler/gateway_handler.go | 68 ++++++++++++++++++---
2 files changed, 62 insertions(+), 8 deletions(-)
diff --git a/backend/cmd/server/wire_gen.go b/backend/cmd/server/wire_gen.go
index 7d465fee..fd4383bf 100644
--- a/backend/cmd/server/wire_gen.go
+++ b/backend/cmd/server/wire_gen.go
@@ -173,7 +173,7 @@ func initializeApplication(buildInfo handler.BuildInfo) (*Application, error) {
userAttributeService := service.NewUserAttributeService(userAttributeDefinitionRepository, userAttributeValueRepository)
userAttributeHandler := admin.NewUserAttributeHandler(userAttributeService)
adminHandlers := handler.ProvideAdminHandlers(dashboardHandler, adminUserHandler, groupHandler, accountHandler, adminAnnouncementHandler, oAuthHandler, openAIOAuthHandler, geminiOAuthHandler, antigravityOAuthHandler, proxyHandler, adminRedeemHandler, promoHandler, settingHandler, opsHandler, systemHandler, adminSubscriptionHandler, adminUsageHandler, userAttributeHandler)
- gatewayHandler := handler.NewGatewayHandler(gatewayService, geminiMessagesCompatService, antigravityGatewayService, userService, concurrencyService, billingCacheService, configConfig)
+ gatewayHandler := handler.NewGatewayHandler(gatewayService, geminiMessagesCompatService, antigravityGatewayService, userService, concurrencyService, billingCacheService, usageService, configConfig)
openAIGatewayHandler := handler.NewOpenAIGatewayHandler(openAIGatewayService, concurrencyService, billingCacheService, configConfig)
handlerSettingHandler := handler.ProvideSettingHandler(settingService, buildInfo)
totpHandler := handler.NewTotpHandler(totpService)
diff --git a/backend/internal/handler/gateway_handler.go b/backend/internal/handler/gateway_handler.go
index 70ea51bf..842242ca 100644
--- a/backend/internal/handler/gateway_handler.go
+++ b/backend/internal/handler/gateway_handler.go
@@ -30,6 +30,7 @@ type GatewayHandler struct {
antigravityGatewayService *service.AntigravityGatewayService
userService *service.UserService
billingCacheService *service.BillingCacheService
+ usageService *service.UsageService
concurrencyHelper *ConcurrencyHelper
maxAccountSwitches int
maxAccountSwitchesGemini int
@@ -43,6 +44,7 @@ func NewGatewayHandler(
userService *service.UserService,
concurrencyService *service.ConcurrencyService,
billingCacheService *service.BillingCacheService,
+ usageService *service.UsageService,
cfg *config.Config,
) *GatewayHandler {
pingInterval := time.Duration(0)
@@ -63,6 +65,7 @@ func NewGatewayHandler(
antigravityGatewayService: antigravityGatewayService,
userService: userService,
billingCacheService: billingCacheService,
+ usageService: usageService,
concurrencyHelper: NewConcurrencyHelper(concurrencyService, SSEPingFormatClaude, pingInterval),
maxAccountSwitches: maxAccountSwitches,
maxAccountSwitchesGemini: maxAccountSwitchesGemini,
@@ -524,7 +527,7 @@ func (h *GatewayHandler) AntigravityModels(c *gin.Context) {
})
}
-// Usage handles getting account balance for CC Switch integration
+// Usage handles getting account balance and usage statistics for CC Switch integration
// GET /v1/usage
func (h *GatewayHandler) Usage(c *gin.Context) {
apiKey, ok := middleware2.GetAPIKeyFromContext(c)
@@ -539,7 +542,40 @@ func (h *GatewayHandler) Usage(c *gin.Context) {
return
}
- // 订阅模式:返回订阅限额信息
+ // Best-effort: 获取用量统计,失败不影响基础响应
+ var usageData gin.H
+ if h.usageService != nil {
+ dashStats, err := h.usageService.GetUserDashboardStats(c.Request.Context(), subject.UserID)
+ if err == nil && dashStats != nil {
+ usageData = gin.H{
+ "today": gin.H{
+ "requests": dashStats.TodayRequests,
+ "input_tokens": dashStats.TodayInputTokens,
+ "output_tokens": dashStats.TodayOutputTokens,
+ "cache_creation_tokens": dashStats.TodayCacheCreationTokens,
+ "cache_read_tokens": dashStats.TodayCacheReadTokens,
+ "total_tokens": dashStats.TodayTokens,
+ "cost": dashStats.TodayCost,
+ "actual_cost": dashStats.TodayActualCost,
+ },
+ "total": gin.H{
+ "requests": dashStats.TotalRequests,
+ "input_tokens": dashStats.TotalInputTokens,
+ "output_tokens": dashStats.TotalOutputTokens,
+ "cache_creation_tokens": dashStats.TotalCacheCreationTokens,
+ "cache_read_tokens": dashStats.TotalCacheReadTokens,
+ "total_tokens": dashStats.TotalTokens,
+ "cost": dashStats.TotalCost,
+ "actual_cost": dashStats.TotalActualCost,
+ },
+ "average_duration_ms": dashStats.AverageDurationMs,
+ "rpm": dashStats.Rpm,
+ "tpm": dashStats.Tpm,
+ }
+ }
+ }
+
+ // 订阅模式:返回订阅限额信息 + 用量统计
if apiKey.Group != nil && apiKey.Group.IsSubscriptionType() {
subscription, ok := middleware2.GetSubscriptionFromContext(c)
if !ok {
@@ -548,28 +584,46 @@ func (h *GatewayHandler) Usage(c *gin.Context) {
}
remaining := h.calculateSubscriptionRemaining(apiKey.Group, subscription)
- c.JSON(http.StatusOK, gin.H{
+ resp := gin.H{
"isValid": true,
"planName": apiKey.Group.Name,
"remaining": remaining,
"unit": "USD",
- })
+ "subscription": gin.H{
+ "daily_usage_usd": subscription.DailyUsageUSD,
+ "weekly_usage_usd": subscription.WeeklyUsageUSD,
+ "monthly_usage_usd": subscription.MonthlyUsageUSD,
+ "daily_limit_usd": apiKey.Group.DailyLimitUSD,
+ "weekly_limit_usd": apiKey.Group.WeeklyLimitUSD,
+ "monthly_limit_usd": apiKey.Group.MonthlyLimitUSD,
+ "expires_at": subscription.ExpiresAt,
+ },
+ }
+ if usageData != nil {
+ resp["usage"] = usageData
+ }
+ c.JSON(http.StatusOK, resp)
return
}
- // 余额模式:返回钱包余额
+ // 余额模式:返回钱包余额 + 用量统计
latestUser, err := h.userService.GetByID(c.Request.Context(), subject.UserID)
if err != nil {
h.errorResponse(c, http.StatusInternalServerError, "api_error", "Failed to get user info")
return
}
- c.JSON(http.StatusOK, gin.H{
+ resp := gin.H{
"isValid": true,
"planName": "钱包余额",
"remaining": latestUser.Balance,
"unit": "USD",
- })
+ "balance": latestUser.Balance,
+ }
+ if usageData != nil {
+ resp["usage"] = usageData
+ }
+ c.JSON(http.StatusOK, resp)
}
// calculateSubscriptionRemaining 计算订阅剩余可用额度
From 673caf41a02946a6562848f0b887ffdeeed39e8c Mon Sep 17 00:00:00 2001
From: Zero Clover
Date: Mon, 2 Feb 2026 18:50:54 +0800
Subject: [PATCH 74/99] =?UTF-8?q?feat(ops):=20=E5=B0=86=20USER=5FINACTIVE?=
=?UTF-8?q?=20=E9=94=99=E8=AF=AF=E6=8E=92=E9=99=A4=E5=9C=A8=20SLA=20?=
=?UTF-8?q?=E7=BB=9F=E8=AE=A1=E4=B9=8B=E5=A4=96?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
将账户停用 (USER_INACTIVE) 导致的请求失败视为业务限制类错误,不计入 SLA 和错误率统计。
账户停用是预期内的业务结果,不应被视为系统错误或服务质量问题。此改动使错误分类更加准确,避免将预期的业务限制误报为系统故障。
修改内容:
- 在 classifyOpsIsBusinessLimited 函数中添加 USER_INACTIVE 错误码
- 该类错误不再触发错误率告警
Fixes Wei-Shaw/sub2api#453
---
backend/internal/handler/ops_error_logger.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/backend/internal/handler/ops_error_logger.go b/backend/internal/handler/ops_error_logger.go
index f62e6b3e..4dc0a9cc 100644
--- a/backend/internal/handler/ops_error_logger.go
+++ b/backend/internal/handler/ops_error_logger.go
@@ -905,7 +905,7 @@ func classifyOpsIsRetryable(errType string, statusCode int) bool {
func classifyOpsIsBusinessLimited(errType, phase, code string, status int, message string) bool {
switch strings.TrimSpace(code) {
- case "INSUFFICIENT_BALANCE", "USAGE_LIMIT_EXCEEDED", "SUBSCRIPTION_NOT_FOUND", "SUBSCRIPTION_INVALID":
+ case "INSUFFICIENT_BALANCE", "USAGE_LIMIT_EXCEEDED", "SUBSCRIPTION_NOT_FOUND", "SUBSCRIPTION_INVALID", "USER_INACTIVE":
return true
}
if phase == "billing" || phase == "concurrency" {
From 79fa18132b09d85445ba035320dc6d5efe9ee36b Mon Sep 17 00:00:00 2001
From: shaw
Date: Mon, 2 Feb 2026 19:58:23 +0800
Subject: [PATCH 75/99] =?UTF-8?q?fix(gateway):=20=E4=BF=AE=E5=A4=8D=20OAut?=
=?UTF-8?q?h=20token=20=E5=88=B7=E6=96=B0=E5=90=8E=E8=B0=83=E5=BA=A6?=
=?UTF-8?q?=E5=99=A8=E7=BC=93=E5=AD=98=E4=B8=8D=E4=B8=80=E8=87=B4=E9=97=AE?=
=?UTF-8?q?=E9=A2=98?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Token 刷新成功后,调度器缓存中的 Account 对象仍包含旧的 credentials,
导致在 Outbox 异步更新之前(最多 1 秒窗口)请求使用过期 token,
返回 403 错误(OAuth token has been revoked)。
修复方案:在 token 刷新成功后同步更新调度器缓存,确保调度获取的
Account 对象立即包含最新的 access_token 和 _token_version。
此修复覆盖所有 OAuth 平台:OpenAI、Claude、Gemini、Antigravity。
---
backend/cmd/server/wire_gen.go | 2 +-
.../internal/service/token_refresh_service.go | 12 +++++++++++
.../service/token_refresh_service_test.go | 20 +++++++++----------
backend/internal/service/wire.go | 3 ++-
4 files changed, 25 insertions(+), 12 deletions(-)
diff --git a/backend/cmd/server/wire_gen.go b/backend/cmd/server/wire_gen.go
index 7d465fee..e99979ef 100644
--- a/backend/cmd/server/wire_gen.go
+++ b/backend/cmd/server/wire_gen.go
@@ -188,7 +188,7 @@ func initializeApplication(buildInfo handler.BuildInfo) (*Application, error) {
opsAlertEvaluatorService := service.ProvideOpsAlertEvaluatorService(opsService, opsRepository, emailService, redisClient, configConfig)
opsCleanupService := service.ProvideOpsCleanupService(opsRepository, db, redisClient, configConfig)
opsScheduledReportService := service.ProvideOpsScheduledReportService(opsService, userService, emailService, redisClient, configConfig)
- tokenRefreshService := service.ProvideTokenRefreshService(accountRepository, oAuthService, openAIOAuthService, geminiOAuthService, antigravityOAuthService, compositeTokenCacheInvalidator, configConfig)
+ tokenRefreshService := service.ProvideTokenRefreshService(accountRepository, oAuthService, openAIOAuthService, geminiOAuthService, antigravityOAuthService, compositeTokenCacheInvalidator, schedulerCache, configConfig)
accountExpiryService := service.ProvideAccountExpiryService(accountRepository)
subscriptionExpiryService := service.ProvideSubscriptionExpiryService(userSubscriptionRepository)
v := provideCleanup(client, redisClient, opsMetricsCollector, opsAggregationService, opsAlertEvaluatorService, opsCleanupService, opsScheduledReportService, schedulerSnapshotService, tokenRefreshService, accountExpiryService, subscriptionExpiryService, usageCleanupService, pricingService, emailQueueService, billingCacheService, oAuthService, openAIOAuthService, geminiOAuthService, antigravityOAuthService)
diff --git a/backend/internal/service/token_refresh_service.go b/backend/internal/service/token_refresh_service.go
index 6ef92bbf..c33cbf48 100644
--- a/backend/internal/service/token_refresh_service.go
+++ b/backend/internal/service/token_refresh_service.go
@@ -18,6 +18,7 @@ type TokenRefreshService struct {
refreshers []TokenRefresher
cfg *config.TokenRefreshConfig
cacheInvalidator TokenCacheInvalidator
+ schedulerCache SchedulerCache // 用于同步更新调度器缓存,解决 token 刷新后缓存不一致问题
stopCh chan struct{}
wg sync.WaitGroup
@@ -31,12 +32,14 @@ func NewTokenRefreshService(
geminiOAuthService *GeminiOAuthService,
antigravityOAuthService *AntigravityOAuthService,
cacheInvalidator TokenCacheInvalidator,
+ schedulerCache SchedulerCache,
cfg *config.Config,
) *TokenRefreshService {
s := &TokenRefreshService{
accountRepo: accountRepo,
cfg: &cfg.TokenRefresh,
cacheInvalidator: cacheInvalidator,
+ schedulerCache: schedulerCache,
stopCh: make(chan struct{}),
}
@@ -198,6 +201,15 @@ func (s *TokenRefreshService) refreshWithRetry(ctx context.Context, account *Acc
log.Printf("[TokenRefresh] Token cache invalidated for account %d", account.ID)
}
}
+ // 同步更新调度器缓存,确保调度获取的 Account 对象包含最新的 credentials
+ // 这解决了 token 刷新后调度器缓存数据不一致的问题(#445)
+ if s.schedulerCache != nil {
+ if err := s.schedulerCache.SetAccount(ctx, account); err != nil {
+ log.Printf("[TokenRefresh] Failed to sync scheduler cache for account %d: %v", account.ID, err)
+ } else {
+ log.Printf("[TokenRefresh] Scheduler cache synced for account %d", account.ID)
+ }
+ }
return nil
}
diff --git a/backend/internal/service/token_refresh_service_test.go b/backend/internal/service/token_refresh_service_test.go
index d23a0bb6..8e16c6f5 100644
--- a/backend/internal/service/token_refresh_service_test.go
+++ b/backend/internal/service/token_refresh_service_test.go
@@ -70,7 +70,7 @@ func TestTokenRefreshService_RefreshWithRetry_InvalidatesCache(t *testing.T) {
RetryBackoffSeconds: 0,
},
}
- service := NewTokenRefreshService(repo, nil, nil, nil, nil, invalidator, cfg)
+ service := NewTokenRefreshService(repo, nil, nil, nil, nil, invalidator, nil, cfg)
account := &Account{
ID: 5,
Platform: PlatformGemini,
@@ -98,7 +98,7 @@ func TestTokenRefreshService_RefreshWithRetry_InvalidatorErrorIgnored(t *testing
RetryBackoffSeconds: 0,
},
}
- service := NewTokenRefreshService(repo, nil, nil, nil, nil, invalidator, cfg)
+ service := NewTokenRefreshService(repo, nil, nil, nil, nil, invalidator, nil, cfg)
account := &Account{
ID: 6,
Platform: PlatformGemini,
@@ -124,7 +124,7 @@ func TestTokenRefreshService_RefreshWithRetry_NilInvalidator(t *testing.T) {
RetryBackoffSeconds: 0,
},
}
- service := NewTokenRefreshService(repo, nil, nil, nil, nil, nil, cfg)
+ service := NewTokenRefreshService(repo, nil, nil, nil, nil, nil, nil, cfg)
account := &Account{
ID: 7,
Platform: PlatformGemini,
@@ -151,7 +151,7 @@ func TestTokenRefreshService_RefreshWithRetry_Antigravity(t *testing.T) {
RetryBackoffSeconds: 0,
},
}
- service := NewTokenRefreshService(repo, nil, nil, nil, nil, invalidator, cfg)
+ service := NewTokenRefreshService(repo, nil, nil, nil, nil, invalidator, nil, cfg)
account := &Account{
ID: 8,
Platform: PlatformAntigravity,
@@ -179,7 +179,7 @@ func TestTokenRefreshService_RefreshWithRetry_NonOAuthAccount(t *testing.T) {
RetryBackoffSeconds: 0,
},
}
- service := NewTokenRefreshService(repo, nil, nil, nil, nil, invalidator, cfg)
+ service := NewTokenRefreshService(repo, nil, nil, nil, nil, invalidator, nil, cfg)
account := &Account{
ID: 9,
Platform: PlatformGemini,
@@ -207,7 +207,7 @@ func TestTokenRefreshService_RefreshWithRetry_OtherPlatformOAuth(t *testing.T) {
RetryBackoffSeconds: 0,
},
}
- service := NewTokenRefreshService(repo, nil, nil, nil, nil, invalidator, cfg)
+ service := NewTokenRefreshService(repo, nil, nil, nil, nil, invalidator, nil, cfg)
account := &Account{
ID: 10,
Platform: PlatformOpenAI, // OpenAI OAuth 账户
@@ -235,7 +235,7 @@ func TestTokenRefreshService_RefreshWithRetry_UpdateFailed(t *testing.T) {
RetryBackoffSeconds: 0,
},
}
- service := NewTokenRefreshService(repo, nil, nil, nil, nil, invalidator, cfg)
+ service := NewTokenRefreshService(repo, nil, nil, nil, nil, invalidator, nil, cfg)
account := &Account{
ID: 11,
Platform: PlatformGemini,
@@ -264,7 +264,7 @@ func TestTokenRefreshService_RefreshWithRetry_RefreshFailed(t *testing.T) {
RetryBackoffSeconds: 0,
},
}
- service := NewTokenRefreshService(repo, nil, nil, nil, nil, invalidator, cfg)
+ service := NewTokenRefreshService(repo, nil, nil, nil, nil, invalidator, nil, cfg)
account := &Account{
ID: 12,
Platform: PlatformGemini,
@@ -291,7 +291,7 @@ func TestTokenRefreshService_RefreshWithRetry_AntigravityRefreshFailed(t *testin
RetryBackoffSeconds: 0,
},
}
- service := NewTokenRefreshService(repo, nil, nil, nil, nil, invalidator, cfg)
+ service := NewTokenRefreshService(repo, nil, nil, nil, nil, invalidator, nil, cfg)
account := &Account{
ID: 13,
Platform: PlatformAntigravity,
@@ -318,7 +318,7 @@ func TestTokenRefreshService_RefreshWithRetry_AntigravityNonRetryableError(t *te
RetryBackoffSeconds: 0,
},
}
- service := NewTokenRefreshService(repo, nil, nil, nil, nil, invalidator, cfg)
+ service := NewTokenRefreshService(repo, nil, nil, nil, nil, invalidator, nil, cfg)
account := &Account{
ID: 14,
Platform: PlatformAntigravity,
diff --git a/backend/internal/service/wire.go b/backend/internal/service/wire.go
index 096e15a0..4b721bb6 100644
--- a/backend/internal/service/wire.go
+++ b/backend/internal/service/wire.go
@@ -44,9 +44,10 @@ func ProvideTokenRefreshService(
geminiOAuthService *GeminiOAuthService,
antigravityOAuthService *AntigravityOAuthService,
cacheInvalidator TokenCacheInvalidator,
+ schedulerCache SchedulerCache,
cfg *config.Config,
) *TokenRefreshService {
- svc := NewTokenRefreshService(accountRepo, oauthService, openaiOAuthService, geminiOAuthService, antigravityOAuthService, cacheInvalidator, cfg)
+ svc := NewTokenRefreshService(accountRepo, oauthService, openaiOAuthService, geminiOAuthService, antigravityOAuthService, cacheInvalidator, schedulerCache, cfg)
svc.Start()
return svc
}
From ad1cdba338ef88e7f8c1d0a5360fca80d95a56a2 Mon Sep 17 00:00:00 2001
From: Zero Clover
Date: Mon, 2 Feb 2026 20:16:17 +0800
Subject: [PATCH 76/99] =?UTF-8?q?feat(ops):=20=E6=94=AF=E6=8C=81=E8=BF=87?=
=?UTF-8?q?=E6=BB=A4=E6=97=A0=E6=95=88=20API=20Key=20=E9=94=99=E8=AF=AF?=
=?UTF-8?q?=EF=BC=8C=E4=B8=8D=E5=86=99=E5=85=A5=E9=94=99=E8=AF=AF=E6=97=A5?=
=?UTF-8?q?=E5=BF=97?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
新增 IgnoreInvalidApiKeyErrors 开关,启用后 INVALID_API_KEY 和
API_KEY_REQUIRED 错误将被完全跳过,不写入 Ops 错误日志。
这些错误由用户配置错误导致,与服务质量无关。
---
backend/internal/handler/ops_error_logger.go | 7 +++++++
backend/internal/service/ops_settings_models.go | 1 +
frontend/src/i18n/locales/en.ts | 2 ++
frontend/src/i18n/locales/zh.ts | 4 +++-
.../views/admin/ops/components/OpsSettingsDialog.vue | 10 ++++++++++
5 files changed, 23 insertions(+), 1 deletion(-)
diff --git a/backend/internal/handler/ops_error_logger.go b/backend/internal/handler/ops_error_logger.go
index f62e6b3e..4d346842 100644
--- a/backend/internal/handler/ops_error_logger.go
+++ b/backend/internal/handler/ops_error_logger.go
@@ -1011,5 +1011,12 @@ func shouldSkipOpsErrorLog(ctx context.Context, ops *service.OpsService, message
}
}
+ // Check if invalid/missing API key errors should be ignored (user misconfiguration)
+ if settings.IgnoreInvalidApiKeyErrors {
+ if strings.Contains(bodyLower, "invalid_api_key") || strings.Contains(bodyLower, "api_key_required") {
+ return true
+ }
+ }
+
return false
}
diff --git a/backend/internal/service/ops_settings_models.go b/backend/internal/service/ops_settings_models.go
index df06f578..ecc62220 100644
--- a/backend/internal/service/ops_settings_models.go
+++ b/backend/internal/service/ops_settings_models.go
@@ -83,6 +83,7 @@ type OpsAdvancedSettings struct {
IgnoreCountTokensErrors bool `json:"ignore_count_tokens_errors"`
IgnoreContextCanceled bool `json:"ignore_context_canceled"`
IgnoreNoAvailableAccounts bool `json:"ignore_no_available_accounts"`
+ IgnoreInvalidApiKeyErrors bool `json:"ignore_invalid_api_key_errors"`
AutoRefreshEnabled bool `json:"auto_refresh_enabled"`
AutoRefreshIntervalSec int `json:"auto_refresh_interval_seconds"`
}
diff --git a/frontend/src/i18n/locales/en.ts b/frontend/src/i18n/locales/en.ts
index bb7defd8..1d53ddb6 100644
--- a/frontend/src/i18n/locales/en.ts
+++ b/frontend/src/i18n/locales/en.ts
@@ -2792,6 +2792,8 @@ export default {
ignoreContextCanceledHint: 'When enabled, client disconnect (context canceled) errors will not be written to the error log.',
ignoreNoAvailableAccounts: 'Ignore no available accounts errors',
ignoreNoAvailableAccountsHint: 'When enabled, "No available accounts" errors will not be written to the error log (not recommended; usually a config issue).',
+ ignoreInvalidApiKeyErrors: 'Ignore invalid API key errors',
+ ignoreInvalidApiKeyErrorsHint: 'When enabled, invalid or missing API key errors (INVALID_API_KEY, API_KEY_REQUIRED) will not be written to the error log.',
autoRefresh: 'Auto Refresh',
enableAutoRefresh: 'Enable auto refresh',
enableAutoRefreshHint: 'Automatically refresh dashboard data at a fixed interval.',
diff --git a/frontend/src/i18n/locales/zh.ts b/frontend/src/i18n/locales/zh.ts
index 2e6230b2..a0ed426e 100644
--- a/frontend/src/i18n/locales/zh.ts
+++ b/frontend/src/i18n/locales/zh.ts
@@ -2944,7 +2944,9 @@ export default {
ignoreContextCanceled: '忽略客户端断连错误',
ignoreContextCanceledHint: '启用后,客户端主动断开连接(context canceled)的错误将不会写入错误日志。',
ignoreNoAvailableAccounts: '忽略无可用账号错误',
- ignoreNoAvailableAccountsHint: '启用后,“No available accounts” 错误将不会写入错误日志(不推荐,这通常是配置问题)。',
+ ignoreNoAvailableAccountsHint: '启用后,"No available accounts" 错误将不会写入错误日志(不推荐,这通常是配置问题)。',
+ ignoreInvalidApiKeyErrors: '忽略无效 API Key 错误',
+ ignoreInvalidApiKeyErrorsHint: '启用后,无效或缺失 API Key 的错误(INVALID_API_KEY、API_KEY_REQUIRED)将不会写入错误日志。',
autoRefresh: '自动刷新',
enableAutoRefresh: '启用自动刷新',
enableAutoRefreshHint: '自动刷新仪表板数据,启用后会定期拉取最新数据。',
diff --git a/frontend/src/views/admin/ops/components/OpsSettingsDialog.vue b/frontend/src/views/admin/ops/components/OpsSettingsDialog.vue
index 53ab6683..3bec6d0d 100644
--- a/frontend/src/views/admin/ops/components/OpsSettingsDialog.vue
+++ b/frontend/src/views/admin/ops/components/OpsSettingsDialog.vue
@@ -505,6 +505,16 @@ async function saveAllSettings() {
+
+
+
+
+
+ {{ t('admin.ops.settings.ignoreInvalidApiKeyErrorsHint') }}
+
+
+
+
From 7b1d63a7867e9f41ef06a4afb039ed14b1a026fd Mon Sep 17 00:00:00 2001
From: shaw
Date: Mon, 2 Feb 2026 21:01:32 +0800
Subject: [PATCH 77/99] =?UTF-8?q?fix(types):=20=E6=B7=BB=E5=8A=A0=E7=BC=BA?=
=?UTF-8?q?=E5=A4=B1=E7=9A=84=20ignore=5Finvalid=5Fapi=5Fkey=5Ferrors=20?=
=?UTF-8?q?=E7=B1=BB=E5=9E=8B=E5=AE=9A=E4=B9=89?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
OpsAdvancedSettings 接口缺少 ignore_invalid_api_key_errors 字段,
导致 TypeScript 编译报错。
---
frontend/src/api/admin/ops.ts | 1 +
1 file changed, 1 insertion(+)
diff --git a/frontend/src/api/admin/ops.ts b/frontend/src/api/admin/ops.ts
index 6e048436..9e0444b1 100644
--- a/frontend/src/api/admin/ops.ts
+++ b/frontend/src/api/admin/ops.ts
@@ -776,6 +776,7 @@ export interface OpsAdvancedSettings {
ignore_count_tokens_errors: boolean
ignore_context_canceled: boolean
ignore_no_available_accounts: boolean
+ ignore_invalid_api_key_errors: boolean
auto_refresh_enabled: boolean
auto_refresh_interval_seconds: number
}
From 45e1429ae8bd9ed1c32e6eced2a74e81457b062d Mon Sep 17 00:00:00 2001
From: liuxiongfeng
Date: Mon, 2 Feb 2026 16:37:22 +0800
Subject: [PATCH 78/99] =?UTF-8?q?feat(billing):=20=E6=B7=BB=E5=8A=A0=20Gem?=
=?UTF-8?q?ini=20200K=20=E9=95=BF=E4=B8=8A=E4=B8=8B=E6=96=87=E5=8F=8C?=
=?UTF-8?q?=E5=80=8D=E8=AE=A1=E8=B4=B9=E5=8A=9F=E8=83=BD?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
- 新增 CalculateCostWithLongContext 方法支持阈值双倍计费
- 新增 RecordUsageWithLongContext 方法专用于 Gemini 计费
- Gemini 超过 200K token 的部分按 2 倍费率计算
- 其他平台(Claude/OpenAI)完全不受影响
---
.../internal/handler/gemini_v1beta_handler.go | 21 ++-
backend/internal/service/billing_service.go | 59 +++++++
backend/internal/service/gateway_service.go | 156 ++++++++++++++++++
3 files changed, 227 insertions(+), 9 deletions(-)
diff --git a/backend/internal/handler/gemini_v1beta_handler.go b/backend/internal/handler/gemini_v1beta_handler.go
index 32f83013..d1b19ede 100644
--- a/backend/internal/handler/gemini_v1beta_handler.go
+++ b/backend/internal/handler/gemini_v1beta_handler.go
@@ -366,18 +366,21 @@ func (h *GatewayHandler) GeminiV1BetaModels(c *gin.Context) {
userAgent := c.GetHeader("User-Agent")
clientIP := ip.GetClientIP(c)
- // 6) record usage async
+ // 6) record usage async (Gemini 使用长上下文双倍计费)
go func(result *service.ForwardResult, usedAccount *service.Account, ua, ip string) {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
- if err := h.gatewayService.RecordUsage(ctx, &service.RecordUsageInput{
- Result: result,
- APIKey: apiKey,
- User: apiKey.User,
- Account: usedAccount,
- Subscription: subscription,
- UserAgent: ua,
- IPAddress: ip,
+
+ if err := h.gatewayService.RecordUsageWithLongContext(ctx, &service.RecordUsageLongContextInput{
+ Result: result,
+ APIKey: apiKey,
+ User: apiKey.User,
+ Account: usedAccount,
+ Subscription: subscription,
+ UserAgent: ua,
+ IPAddress: ip,
+ LongContextThreshold: 200000, // Gemini 200K 阈值
+ LongContextMultiplier: 2.0, // 超出部分双倍计费
}); err != nil {
log.Printf("Record usage failed: %v", err)
}
diff --git a/backend/internal/service/billing_service.go b/backend/internal/service/billing_service.go
index f2afc343..95e16c4e 100644
--- a/backend/internal/service/billing_service.go
+++ b/backend/internal/service/billing_service.go
@@ -241,6 +241,65 @@ func (s *BillingService) CalculateCostWithConfig(model string, tokens UsageToken
return s.CalculateCost(model, tokens, multiplier)
}
+// CalculateCostWithLongContext 计算费用,支持长上下文双倍计费
+// threshold: 阈值(如 200000),超过此值的部分按 extraMultiplier 倍计费
+// extraMultiplier: 超出部分的倍率(如 2.0 表示双倍)
+func (s *BillingService) CalculateCostWithLongContext(model string, tokens UsageTokens, rateMultiplier float64, threshold int, extraMultiplier float64) (*CostBreakdown, error) {
+ // 1. 先正常计算全部 token 的成本
+ cost, err := s.CalculateCost(model, tokens, rateMultiplier)
+ if err != nil {
+ return nil, err
+ }
+
+ // 2. 如果未启用长上下文计费或未超过阈值,直接返回
+ if threshold <= 0 || extraMultiplier <= 1 {
+ return cost, nil
+ }
+
+ // 计算总输入 token(缓存读取 + 新输入)
+ total := tokens.CacheReadTokens + tokens.InputTokens
+ if total <= threshold {
+ return cost, nil
+ }
+
+ // 3. 拆分超出部分的 token
+ extra := total - threshold
+ var extraCacheTokens, extraInputTokens int
+
+ if tokens.CacheReadTokens >= threshold {
+ // 缓存已超过阈值:超出的缓存 + 全部输入
+ extraCacheTokens = tokens.CacheReadTokens - threshold
+ extraInputTokens = tokens.InputTokens
+ } else {
+ // 缓存未超过阈值:只有输入超出部分
+ extraCacheTokens = 0
+ extraInputTokens = extra
+ }
+
+ // 4. 计算超出部分的成本(只算输入和缓存读取)
+ extraTokens := UsageTokens{
+ InputTokens: extraInputTokens,
+ CacheReadTokens: extraCacheTokens,
+ }
+ extraCost, err := s.CalculateCost(model, extraTokens, 1.0) // 先按 1 倍算
+ if err != nil {
+ return cost, nil // 出错时返回正常成本
+ }
+
+ // 5. 额外成本 = 超出部分成本 × (倍率 - 1)
+ extraRate := extraMultiplier - 1
+ additionalInputCost := extraCost.InputCost * extraRate
+ additionalCacheCost := extraCost.CacheReadCost * extraRate
+
+ // 6. 累加到总成本
+ cost.InputCost += additionalInputCost
+ cost.CacheReadCost += additionalCacheCost
+ cost.TotalCost += additionalInputCost + additionalCacheCost
+ cost.ActualCost = cost.TotalCost * rateMultiplier
+
+ return cost, nil
+}
+
// ListSupportedModels 列出所有支持的模型(现在总是返回true,因为有模糊匹配)
func (s *BillingService) ListSupportedModels() []string {
models := make([]string, 0)
diff --git a/backend/internal/service/gateway_service.go b/backend/internal/service/gateway_service.go
index 7a901907..9125163a 100644
--- a/backend/internal/service/gateway_service.go
+++ b/backend/internal/service/gateway_service.go
@@ -3606,6 +3606,162 @@ func (s *GatewayService) RecordUsage(ctx context.Context, input *RecordUsageInpu
return nil
}
+// RecordUsageLongContextInput 记录使用量的输入参数(支持长上下文双倍计费)
+type RecordUsageLongContextInput struct {
+ Result *ForwardResult
+ APIKey *APIKey
+ User *User
+ Account *Account
+ Subscription *UserSubscription // 可选:订阅信息
+ UserAgent string // 请求的 User-Agent
+ IPAddress string // 请求的客户端 IP 地址
+ LongContextThreshold int // 长上下文阈值(如 200000)
+ LongContextMultiplier float64 // 超出阈值部分的倍率(如 2.0)
+}
+
+// RecordUsageWithLongContext 记录使用量并扣费,支持长上下文双倍计费(用于 Gemini)
+func (s *GatewayService) RecordUsageWithLongContext(ctx context.Context, input *RecordUsageLongContextInput) error {
+ result := input.Result
+ apiKey := input.APIKey
+ user := input.User
+ account := input.Account
+ subscription := input.Subscription
+
+ // 获取费率倍数
+ multiplier := s.cfg.Default.RateMultiplier
+ if apiKey.GroupID != nil && apiKey.Group != nil {
+ multiplier = apiKey.Group.RateMultiplier
+ }
+
+ var cost *CostBreakdown
+
+ // 根据请求类型选择计费方式
+ if result.ImageCount > 0 {
+ // 图片生成计费
+ var groupConfig *ImagePriceConfig
+ if apiKey.Group != nil {
+ groupConfig = &ImagePriceConfig{
+ Price1K: apiKey.Group.ImagePrice1K,
+ Price2K: apiKey.Group.ImagePrice2K,
+ Price4K: apiKey.Group.ImagePrice4K,
+ }
+ }
+ cost = s.billingService.CalculateImageCost(result.Model, result.ImageSize, result.ImageCount, groupConfig, multiplier)
+ } else {
+ // Token 计费(使用长上下文计费方法)
+ tokens := UsageTokens{
+ InputTokens: result.Usage.InputTokens,
+ OutputTokens: result.Usage.OutputTokens,
+ CacheCreationTokens: result.Usage.CacheCreationInputTokens,
+ CacheReadTokens: result.Usage.CacheReadInputTokens,
+ }
+ var err error
+ cost, err = s.billingService.CalculateCostWithLongContext(result.Model, tokens, multiplier, input.LongContextThreshold, input.LongContextMultiplier)
+ if err != nil {
+ log.Printf("Calculate cost failed: %v", err)
+ cost = &CostBreakdown{ActualCost: 0}
+ }
+ }
+
+ // 判断计费方式:订阅模式 vs 余额模式
+ isSubscriptionBilling := subscription != nil && apiKey.Group != nil && apiKey.Group.IsSubscriptionType()
+ billingType := BillingTypeBalance
+ if isSubscriptionBilling {
+ billingType = BillingTypeSubscription
+ }
+
+ // 创建使用日志
+ durationMs := int(result.Duration.Milliseconds())
+ var imageSize *string
+ if result.ImageSize != "" {
+ imageSize = &result.ImageSize
+ }
+ accountRateMultiplier := account.BillingRateMultiplier()
+ usageLog := &UsageLog{
+ UserID: user.ID,
+ APIKeyID: apiKey.ID,
+ AccountID: account.ID,
+ RequestID: result.RequestID,
+ Model: result.Model,
+ InputTokens: result.Usage.InputTokens,
+ OutputTokens: result.Usage.OutputTokens,
+ CacheCreationTokens: result.Usage.CacheCreationInputTokens,
+ CacheReadTokens: result.Usage.CacheReadInputTokens,
+ InputCost: cost.InputCost,
+ OutputCost: cost.OutputCost,
+ CacheCreationCost: cost.CacheCreationCost,
+ CacheReadCost: cost.CacheReadCost,
+ TotalCost: cost.TotalCost,
+ ActualCost: cost.ActualCost,
+ RateMultiplier: multiplier,
+ AccountRateMultiplier: &accountRateMultiplier,
+ BillingType: billingType,
+ Stream: result.Stream,
+ DurationMs: &durationMs,
+ FirstTokenMs: result.FirstTokenMs,
+ ImageCount: result.ImageCount,
+ ImageSize: imageSize,
+ CreatedAt: time.Now(),
+ }
+
+ // 添加 UserAgent
+ if input.UserAgent != "" {
+ usageLog.UserAgent = &input.UserAgent
+ }
+
+ // 添加 IPAddress
+ if input.IPAddress != "" {
+ usageLog.IPAddress = &input.IPAddress
+ }
+
+ // 添加分组和订阅关联
+ if apiKey.GroupID != nil {
+ usageLog.GroupID = apiKey.GroupID
+ }
+ if subscription != nil {
+ usageLog.SubscriptionID = &subscription.ID
+ }
+
+ inserted, err := s.usageLogRepo.Create(ctx, usageLog)
+ if err != nil {
+ log.Printf("Create usage log failed: %v", err)
+ }
+
+ if s.cfg != nil && s.cfg.RunMode == config.RunModeSimple {
+ log.Printf("[SIMPLE MODE] Usage recorded (not billed): user=%d, tokens=%d", usageLog.UserID, usageLog.TotalTokens())
+ s.deferredService.ScheduleLastUsedUpdate(account.ID)
+ return nil
+ }
+
+ shouldBill := inserted || err != nil
+
+ // 根据计费类型执行扣费
+ if isSubscriptionBilling {
+ // 订阅模式:更新订阅用量(使用 TotalCost 原始费用,不考虑倍率)
+ if shouldBill && cost.TotalCost > 0 {
+ if err := s.userSubRepo.IncrementUsage(ctx, subscription.ID, cost.TotalCost); err != nil {
+ log.Printf("Increment subscription usage failed: %v", err)
+ }
+ // 异步更新订阅缓存
+ s.billingCacheService.QueueUpdateSubscriptionUsage(user.ID, *apiKey.GroupID, cost.TotalCost)
+ }
+ } else {
+ // 余额模式:扣除用户余额(使用 ActualCost 考虑倍率后的费用)
+ if shouldBill && cost.ActualCost > 0 {
+ if err := s.userRepo.DeductBalance(ctx, user.ID, cost.ActualCost); err != nil {
+ log.Printf("Deduct balance failed: %v", err)
+ }
+ // 异步更新余额缓存
+ s.billingCacheService.QueueDeductBalance(user.ID, cost.ActualCost)
+ }
+ }
+
+ // Schedule batch update for account last_used_at
+ s.deferredService.ScheduleLastUsedUpdate(account.ID)
+
+ return nil
+}
+
// ForwardCountTokens 转发 count_tokens 请求到上游 API
// 特点:不记录使用量、仅支持非流式响应
func (s *GatewayService) ForwardCountTokens(ctx context.Context, c *gin.Context, account *Account, parsed *ParsedRequest) error {
From b381e8ee73e3a362ed217dce48529aba76d849c4 Mon Sep 17 00:00:00 2001
From: liuxiongfeng
Date: Mon, 2 Feb 2026 16:42:07 +0800
Subject: [PATCH 79/99] =?UTF-8?q?refactor(billing):=20=E7=AE=80=E5=8C=96?=
=?UTF-8?q?=20CalculateCostWithLongContext=20=E9=80=BB=E8=BE=91?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
将 token 直接拆分为范围内和范围外两部分,分别调用 CalculateCost:
- 范围内:正常计费 (rateMultiplier)
- 范围外:双倍计费 (rateMultiplier × extraMultiplier)
代码更直观,便于理解和维护。
---
backend/internal/service/billing_service.go | 81 ++++++++++++---------
1 file changed, 46 insertions(+), 35 deletions(-)
diff --git a/backend/internal/service/billing_service.go b/backend/internal/service/billing_service.go
index 95e16c4e..db5a9708 100644
--- a/backend/internal/service/billing_service.go
+++ b/backend/internal/service/billing_service.go
@@ -244,60 +244,71 @@ func (s *BillingService) CalculateCostWithConfig(model string, tokens UsageToken
// CalculateCostWithLongContext 计算费用,支持长上下文双倍计费
// threshold: 阈值(如 200000),超过此值的部分按 extraMultiplier 倍计费
// extraMultiplier: 超出部分的倍率(如 2.0 表示双倍)
+//
+// 示例:缓存 210k + 输入 10k = 220k,阈值 200k,倍率 2.0
+// 拆分为:范围内 (200k, 0) + 范围外 (10k, 10k)
+// 范围内正常计费,范围外 × 2 计费
func (s *BillingService) CalculateCostWithLongContext(model string, tokens UsageTokens, rateMultiplier float64, threshold int, extraMultiplier float64) (*CostBreakdown, error) {
- // 1. 先正常计算全部 token 的成本
- cost, err := s.CalculateCost(model, tokens, rateMultiplier)
- if err != nil {
- return nil, err
- }
-
- // 2. 如果未启用长上下文计费或未超过阈值,直接返回
+ // 未启用长上下文计费,直接走正常计费
if threshold <= 0 || extraMultiplier <= 1 {
- return cost, nil
+ return s.CalculateCost(model, tokens, rateMultiplier)
}
// 计算总输入 token(缓存读取 + 新输入)
total := tokens.CacheReadTokens + tokens.InputTokens
if total <= threshold {
- return cost, nil
+ return s.CalculateCost(model, tokens, rateMultiplier)
}
- // 3. 拆分超出部分的 token
- extra := total - threshold
- var extraCacheTokens, extraInputTokens int
+ // 拆分成范围内和范围外
+ var inRangeCacheTokens, inRangeInputTokens int
+ var outRangeCacheTokens, outRangeInputTokens int
if tokens.CacheReadTokens >= threshold {
- // 缓存已超过阈值:超出的缓存 + 全部输入
- extraCacheTokens = tokens.CacheReadTokens - threshold
- extraInputTokens = tokens.InputTokens
+ // 缓存已超过阈值:范围内只有缓存,范围外是超出的缓存+全部输入
+ inRangeCacheTokens = threshold
+ inRangeInputTokens = 0
+ outRangeCacheTokens = tokens.CacheReadTokens - threshold
+ outRangeInputTokens = tokens.InputTokens
} else {
- // 缓存未超过阈值:只有输入超出部分
- extraCacheTokens = 0
- extraInputTokens = extra
+ // 缓存未超过阈值:范围内是全部缓存+部分输入,范围外是剩余输入
+ inRangeCacheTokens = tokens.CacheReadTokens
+ inRangeInputTokens = threshold - tokens.CacheReadTokens
+ outRangeCacheTokens = 0
+ outRangeInputTokens = tokens.InputTokens - inRangeInputTokens
}
- // 4. 计算超出部分的成本(只算输入和缓存读取)
- extraTokens := UsageTokens{
- InputTokens: extraInputTokens,
- CacheReadTokens: extraCacheTokens,
+ // 范围内部分:正常计费
+ inRangeTokens := UsageTokens{
+ InputTokens: inRangeInputTokens,
+ OutputTokens: tokens.OutputTokens, // 输出只算一次
+ CacheCreationTokens: tokens.CacheCreationTokens,
+ CacheReadTokens: inRangeCacheTokens,
}
- extraCost, err := s.CalculateCost(model, extraTokens, 1.0) // 先按 1 倍算
+ inRangeCost, err := s.CalculateCost(model, inRangeTokens, rateMultiplier)
if err != nil {
- return cost, nil // 出错时返回正常成本
+ return nil, err
}
- // 5. 额外成本 = 超出部分成本 × (倍率 - 1)
- extraRate := extraMultiplier - 1
- additionalInputCost := extraCost.InputCost * extraRate
- additionalCacheCost := extraCost.CacheReadCost * extraRate
+ // 范围外部分:× extraMultiplier 计费
+ outRangeTokens := UsageTokens{
+ InputTokens: outRangeInputTokens,
+ CacheReadTokens: outRangeCacheTokens,
+ }
+ outRangeCost, err := s.CalculateCost(model, outRangeTokens, rateMultiplier*extraMultiplier)
+ if err != nil {
+ return inRangeCost, nil // 出错时返回范围内成本
+ }
- // 6. 累加到总成本
- cost.InputCost += additionalInputCost
- cost.CacheReadCost += additionalCacheCost
- cost.TotalCost += additionalInputCost + additionalCacheCost
- cost.ActualCost = cost.TotalCost * rateMultiplier
-
- return cost, nil
+ // 合并成本
+ return &CostBreakdown{
+ InputCost: inRangeCost.InputCost + outRangeCost.InputCost,
+ OutputCost: inRangeCost.OutputCost,
+ CacheCreationCost: inRangeCost.CacheCreationCost,
+ CacheReadCost: inRangeCost.CacheReadCost + outRangeCost.CacheReadCost,
+ TotalCost: inRangeCost.TotalCost + outRangeCost.TotalCost,
+ ActualCost: inRangeCost.ActualCost + outRangeCost.ActualCost,
+ }, nil
}
// ListSupportedModels 列出所有支持的模型(现在总是返回true,因为有模糊匹配)
From e1a4a7b8c0a28f0c9e5bbfa08f8eb32f14619750 Mon Sep 17 00:00:00 2001
From: liuxiongfeng
Date: Mon, 2 Feb 2026 16:46:25 +0800
Subject: [PATCH 80/99] =?UTF-8?q?feat(groups):=20=E6=B7=BB=E5=8A=A0?=
=?UTF-8?q?=E4=BB=8E=E5=85=B6=E4=BB=96=E5=88=86=E7=BB=84=E5=A4=8D=E5=88=B6?=
=?UTF-8?q?=E8=B4=A6=E5=8F=B7=E5=8A=9F=E8=83=BD?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
- 创建分组时可选择从已有分组复制账号
- 编辑分组时支持同步账号(全量替换操作)
- 仅允许选择相同平台的源分组
- 添加完整的数据校验:去重、自引用检查、平台一致性检查
- 前端支持多选源分组,带提示说明操作行为
---
.../internal/handler/admin/group_handler.go | 72 ++++----
backend/internal/repository/group_repo.go | 58 +++++++
backend/internal/service/admin_service.go | 93 ++++++++++
backend/internal/service/group_service.go | 4 +
frontend/src/i18n/locales/en.ts | 8 +
frontend/src/i18n/locales/zh.ts | 8 +
frontend/src/types/index.ts | 3 +
frontend/src/views/admin/GroupsView.vue | 161 +++++++++++++++++-
8 files changed, 372 insertions(+), 35 deletions(-)
diff --git a/backend/internal/handler/admin/group_handler.go b/backend/internal/handler/admin/group_handler.go
index 926624d2..f93edbc8 100644
--- a/backend/internal/handler/admin/group_handler.go
+++ b/backend/internal/handler/admin/group_handler.go
@@ -43,6 +43,8 @@ type CreateGroupRequest struct {
// 模型路由配置(仅 anthropic 平台使用)
ModelRouting map[string][]int64 `json:"model_routing"`
ModelRoutingEnabled bool `json:"model_routing_enabled"`
+ // 从指定分组复制账号(创建后自动绑定)
+ CopyAccountsFromGroupIDs []int64 `json:"copy_accounts_from_group_ids"`
}
// UpdateGroupRequest represents update group request
@@ -66,6 +68,8 @@ type UpdateGroupRequest struct {
// 模型路由配置(仅 anthropic 平台使用)
ModelRouting map[string][]int64 `json:"model_routing"`
ModelRoutingEnabled *bool `json:"model_routing_enabled"`
+ // 从指定分组复制账号(同步操作:先清空当前分组的账号绑定,再绑定源分组的账号)
+ CopyAccountsFromGroupIDs []int64 `json:"copy_accounts_from_group_ids"`
}
// List handles listing all groups with pagination
@@ -155,22 +159,23 @@ func (h *GroupHandler) Create(c *gin.Context) {
}
group, err := h.adminService.CreateGroup(c.Request.Context(), &service.CreateGroupInput{
- Name: req.Name,
- Description: req.Description,
- Platform: req.Platform,
- RateMultiplier: req.RateMultiplier,
- IsExclusive: req.IsExclusive,
- SubscriptionType: req.SubscriptionType,
- DailyLimitUSD: req.DailyLimitUSD,
- WeeklyLimitUSD: req.WeeklyLimitUSD,
- MonthlyLimitUSD: req.MonthlyLimitUSD,
- ImagePrice1K: req.ImagePrice1K,
- ImagePrice2K: req.ImagePrice2K,
- ImagePrice4K: req.ImagePrice4K,
- ClaudeCodeOnly: req.ClaudeCodeOnly,
- FallbackGroupID: req.FallbackGroupID,
- ModelRouting: req.ModelRouting,
- ModelRoutingEnabled: req.ModelRoutingEnabled,
+ Name: req.Name,
+ Description: req.Description,
+ Platform: req.Platform,
+ RateMultiplier: req.RateMultiplier,
+ IsExclusive: req.IsExclusive,
+ SubscriptionType: req.SubscriptionType,
+ DailyLimitUSD: req.DailyLimitUSD,
+ WeeklyLimitUSD: req.WeeklyLimitUSD,
+ MonthlyLimitUSD: req.MonthlyLimitUSD,
+ ImagePrice1K: req.ImagePrice1K,
+ ImagePrice2K: req.ImagePrice2K,
+ ImagePrice4K: req.ImagePrice4K,
+ ClaudeCodeOnly: req.ClaudeCodeOnly,
+ FallbackGroupID: req.FallbackGroupID,
+ ModelRouting: req.ModelRouting,
+ ModelRoutingEnabled: req.ModelRoutingEnabled,
+ CopyAccountsFromGroupIDs: req.CopyAccountsFromGroupIDs,
})
if err != nil {
response.ErrorFrom(c, err)
@@ -196,23 +201,24 @@ func (h *GroupHandler) Update(c *gin.Context) {
}
group, err := h.adminService.UpdateGroup(c.Request.Context(), groupID, &service.UpdateGroupInput{
- Name: req.Name,
- Description: req.Description,
- Platform: req.Platform,
- RateMultiplier: req.RateMultiplier,
- IsExclusive: req.IsExclusive,
- Status: req.Status,
- SubscriptionType: req.SubscriptionType,
- DailyLimitUSD: req.DailyLimitUSD,
- WeeklyLimitUSD: req.WeeklyLimitUSD,
- MonthlyLimitUSD: req.MonthlyLimitUSD,
- ImagePrice1K: req.ImagePrice1K,
- ImagePrice2K: req.ImagePrice2K,
- ImagePrice4K: req.ImagePrice4K,
- ClaudeCodeOnly: req.ClaudeCodeOnly,
- FallbackGroupID: req.FallbackGroupID,
- ModelRouting: req.ModelRouting,
- ModelRoutingEnabled: req.ModelRoutingEnabled,
+ Name: req.Name,
+ Description: req.Description,
+ Platform: req.Platform,
+ RateMultiplier: req.RateMultiplier,
+ IsExclusive: req.IsExclusive,
+ Status: req.Status,
+ SubscriptionType: req.SubscriptionType,
+ DailyLimitUSD: req.DailyLimitUSD,
+ WeeklyLimitUSD: req.WeeklyLimitUSD,
+ MonthlyLimitUSD: req.MonthlyLimitUSD,
+ ImagePrice1K: req.ImagePrice1K,
+ ImagePrice2K: req.ImagePrice2K,
+ ImagePrice4K: req.ImagePrice4K,
+ ClaudeCodeOnly: req.ClaudeCodeOnly,
+ FallbackGroupID: req.FallbackGroupID,
+ ModelRouting: req.ModelRouting,
+ ModelRoutingEnabled: req.ModelRoutingEnabled,
+ CopyAccountsFromGroupIDs: req.CopyAccountsFromGroupIDs,
})
if err != nil {
response.ErrorFrom(c, err)
diff --git a/backend/internal/repository/group_repo.go b/backend/internal/repository/group_repo.go
index 5c4d6cf4..002e07da 100644
--- a/backend/internal/repository/group_repo.go
+++ b/backend/internal/repository/group_repo.go
@@ -425,3 +425,61 @@ func (r *groupRepository) loadAccountCounts(ctx context.Context, groupIDs []int6
return counts, nil
}
+
+// GetAccountIDsByGroupIDs 获取多个分组的所有账号 ID(去重)
+func (r *groupRepository) GetAccountIDsByGroupIDs(ctx context.Context, groupIDs []int64) ([]int64, error) {
+ if len(groupIDs) == 0 {
+ return nil, nil
+ }
+
+ rows, err := r.sql.QueryContext(
+ ctx,
+ "SELECT DISTINCT account_id FROM account_groups WHERE group_id = ANY($1) ORDER BY account_id",
+ pq.Array(groupIDs),
+ )
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+
+ var accountIDs []int64
+ for rows.Next() {
+ var accountID int64
+ if err := rows.Scan(&accountID); err != nil {
+ return nil, err
+ }
+ accountIDs = append(accountIDs, accountID)
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+
+ return accountIDs, nil
+}
+
+// BindAccountsToGroup 将多个账号绑定到指定分组(批量插入,忽略已存在的绑定)
+func (r *groupRepository) BindAccountsToGroup(ctx context.Context, groupID int64, accountIDs []int64) error {
+ if len(accountIDs) == 0 {
+ return nil
+ }
+
+ // 使用 INSERT ... ON CONFLICT DO NOTHING 忽略已存在的绑定
+ _, err := r.sql.ExecContext(
+ ctx,
+ `INSERT INTO account_groups (account_id, group_id, priority, created_at)
+ SELECT unnest($1::bigint[]), $2, 50, NOW()
+ ON CONFLICT (account_id, group_id) DO NOTHING`,
+ pq.Array(accountIDs),
+ groupID,
+ )
+ if err != nil {
+ return err
+ }
+
+ // 发送调度器事件
+ if err := enqueueSchedulerOutbox(ctx, r.sql, service.SchedulerOutboxEventGroupChanged, nil, &groupID, nil); err != nil {
+ log.Printf("[SchedulerOutbox] enqueue bind accounts to group failed: group=%d err=%v", groupID, err)
+ }
+
+ return nil
+}
diff --git a/backend/internal/service/admin_service.go b/backend/internal/service/admin_service.go
index 0afa0716..ef2d526b 100644
--- a/backend/internal/service/admin_service.go
+++ b/backend/internal/service/admin_service.go
@@ -110,6 +110,8 @@ type CreateGroupInput struct {
// 模型路由配置(仅 anthropic 平台使用)
ModelRouting map[string][]int64
ModelRoutingEnabled bool // 是否启用模型路由
+ // 从指定分组复制账号(创建分组后在同一事务内绑定)
+ CopyAccountsFromGroupIDs []int64
}
type UpdateGroupInput struct {
@@ -132,6 +134,8 @@ type UpdateGroupInput struct {
// 模型路由配置(仅 anthropic 平台使用)
ModelRouting map[string][]int64
ModelRoutingEnabled *bool // 是否启用模型路由
+ // 从指定分组复制账号(同步操作:先清空当前分组的账号绑定,再绑定源分组的账号)
+ CopyAccountsFromGroupIDs []int64
}
type CreateAccountInput struct {
@@ -572,6 +576,38 @@ func (s *adminServiceImpl) CreateGroup(ctx context.Context, input *CreateGroupIn
}
}
+ // 如果指定了复制账号的源分组,先获取账号 ID 列表
+ var accountIDsToCopy []int64
+ if len(input.CopyAccountsFromGroupIDs) > 0 {
+ // 去重源分组 IDs
+ seen := make(map[int64]struct{})
+ uniqueSourceGroupIDs := make([]int64, 0, len(input.CopyAccountsFromGroupIDs))
+ for _, srcGroupID := range input.CopyAccountsFromGroupIDs {
+ if _, exists := seen[srcGroupID]; !exists {
+ seen[srcGroupID] = struct{}{}
+ uniqueSourceGroupIDs = append(uniqueSourceGroupIDs, srcGroupID)
+ }
+ }
+
+ // 校验源分组的平台是否与新分组一致
+ for _, srcGroupID := range uniqueSourceGroupIDs {
+ srcGroup, err := s.groupRepo.GetByIDLite(ctx, srcGroupID)
+ if err != nil {
+ return nil, fmt.Errorf("source group %d not found: %w", srcGroupID, err)
+ }
+ if srcGroup.Platform != platform {
+ return nil, fmt.Errorf("source group %d platform mismatch: expected %s, got %s", srcGroupID, platform, srcGroup.Platform)
+ }
+ }
+
+ // 获取所有源分组的账号(去重)
+ var err error
+ accountIDsToCopy, err = s.groupRepo.GetAccountIDsByGroupIDs(ctx, uniqueSourceGroupIDs)
+ if err != nil {
+ return nil, fmt.Errorf("failed to get accounts from source groups: %w", err)
+ }
+ }
+
group := &Group{
Name: input.Name,
Description: input.Description,
@@ -593,6 +629,15 @@ func (s *adminServiceImpl) CreateGroup(ctx context.Context, input *CreateGroupIn
if err := s.groupRepo.Create(ctx, group); err != nil {
return nil, err
}
+
+ // 如果有需要复制的账号,绑定到新分组
+ if len(accountIDsToCopy) > 0 {
+ if err := s.groupRepo.BindAccountsToGroup(ctx, group.ID, accountIDsToCopy); err != nil {
+ return nil, fmt.Errorf("failed to bind accounts to new group: %w", err)
+ }
+ group.AccountCount = int64(len(accountIDsToCopy))
+ }
+
return group, nil
}
@@ -728,6 +773,54 @@ func (s *adminServiceImpl) UpdateGroup(ctx context.Context, id int64, input *Upd
if err := s.groupRepo.Update(ctx, group); err != nil {
return nil, err
}
+
+ // 如果指定了复制账号的源分组,同步绑定(替换当前分组的账号)
+ if len(input.CopyAccountsFromGroupIDs) > 0 {
+ // 去重源分组 IDs
+ seen := make(map[int64]struct{})
+ uniqueSourceGroupIDs := make([]int64, 0, len(input.CopyAccountsFromGroupIDs))
+ for _, srcGroupID := range input.CopyAccountsFromGroupIDs {
+ // 校验:源分组不能是自身
+ if srcGroupID == id {
+ return nil, fmt.Errorf("cannot copy accounts from self")
+ }
+ // 去重
+ if _, exists := seen[srcGroupID]; !exists {
+ seen[srcGroupID] = struct{}{}
+ uniqueSourceGroupIDs = append(uniqueSourceGroupIDs, srcGroupID)
+ }
+ }
+
+ // 校验源分组的平台是否与当前分组一致
+ for _, srcGroupID := range uniqueSourceGroupIDs {
+ srcGroup, err := s.groupRepo.GetByIDLite(ctx, srcGroupID)
+ if err != nil {
+ return nil, fmt.Errorf("source group %d not found: %w", srcGroupID, err)
+ }
+ if srcGroup.Platform != group.Platform {
+ return nil, fmt.Errorf("source group %d platform mismatch: expected %s, got %s", srcGroupID, group.Platform, srcGroup.Platform)
+ }
+ }
+
+ // 获取所有源分组的账号(去重)
+ accountIDsToCopy, err := s.groupRepo.GetAccountIDsByGroupIDs(ctx, uniqueSourceGroupIDs)
+ if err != nil {
+ return nil, fmt.Errorf("failed to get accounts from source groups: %w", err)
+ }
+
+ // 先清空当前分组的所有账号绑定
+ if _, err := s.groupRepo.DeleteAccountGroupsByGroupID(ctx, id); err != nil {
+ return nil, fmt.Errorf("failed to clear existing account bindings: %w", err)
+ }
+
+ // 再绑定源分组的账号
+ if len(accountIDsToCopy) > 0 {
+ if err := s.groupRepo.BindAccountsToGroup(ctx, id, accountIDsToCopy); err != nil {
+ return nil, fmt.Errorf("failed to bind accounts to group: %w", err)
+ }
+ }
+ }
+
if s.authCacheInvalidator != nil {
s.authCacheInvalidator.InvalidateAuthCacheByGroupID(ctx, id)
}
diff --git a/backend/internal/service/group_service.go b/backend/internal/service/group_service.go
index 324f347b..a2bf2073 100644
--- a/backend/internal/service/group_service.go
+++ b/backend/internal/service/group_service.go
@@ -29,6 +29,10 @@ type GroupRepository interface {
ExistsByName(ctx context.Context, name string) (bool, error)
GetAccountCount(ctx context.Context, groupID int64) (int64, error)
DeleteAccountGroupsByGroupID(ctx context.Context, groupID int64) (int64, error)
+ // GetAccountIDsByGroupIDs 获取多个分组的所有账号 ID(去重)
+ GetAccountIDsByGroupIDs(ctx context.Context, groupIDs []int64) ([]int64, error)
+ // BindAccountsToGroup 将多个账号绑定到指定分组
+ BindAccountsToGroup(ctx context.Context, groupID int64, accountIDs []int64) error
}
// CreateGroupRequest 创建分组请求
diff --git a/frontend/src/i18n/locales/en.ts b/frontend/src/i18n/locales/en.ts
index 1d53ddb6..7c4df36b 100644
--- a/frontend/src/i18n/locales/en.ts
+++ b/frontend/src/i18n/locales/en.ts
@@ -1004,6 +1004,14 @@ export default {
fallbackHint: 'Non-Claude Code requests will use this group. Leave empty to reject directly.',
noFallback: 'No Fallback (Reject)'
},
+ copyAccounts: {
+ title: 'Copy Accounts from Groups',
+ tooltip: 'Select one or more groups of the same platform. After creation, all accounts from these groups will be automatically bound to the new group (deduplicated).',
+ tooltipEdit: 'Select one or more groups of the same platform. After saving, current group accounts will be replaced with accounts from these groups (deduplicated).',
+ selectPlaceholder: 'Select groups to copy accounts from...',
+ hint: 'Multiple groups can be selected, accounts will be deduplicated',
+ hintEdit: '⚠️ Warning: This will replace all existing account bindings'
+ },
modelRouting: {
title: 'Model Routing',
tooltip: 'Configure specific model requests to be routed to designated accounts. Supports wildcard matching, e.g., claude-opus-* matches all opus models.',
diff --git a/frontend/src/i18n/locales/zh.ts b/frontend/src/i18n/locales/zh.ts
index a0ed426e..ba1c775f 100644
--- a/frontend/src/i18n/locales/zh.ts
+++ b/frontend/src/i18n/locales/zh.ts
@@ -1079,6 +1079,14 @@ export default {
fallbackHint: '非 Claude Code 请求将使用此分组,留空则直接拒绝',
noFallback: '不降级(直接拒绝)'
},
+ copyAccounts: {
+ title: '从分组复制账号',
+ tooltip: '选择一个或多个相同平台的分组,创建后会自动将这些分组的所有账号绑定到新分组(去重)。',
+ tooltipEdit: '选择一个或多个相同平台的分组,保存后当前分组的账号会被替换为这些分组的账号(去重)。',
+ selectPlaceholder: '选择分组以复制其账号...',
+ hint: '可选多个分组,账号会自动去重',
+ hintEdit: '⚠️ 注意:这会替换当前分组的所有账号绑定'
+ },
modelRouting: {
title: '模型路由配置',
tooltip: '配置特定模型请求优先路由到指定账号。支持通配符匹配,如 claude-opus-* 匹配所有 opus 模型。',
diff --git a/frontend/src/types/index.ts b/frontend/src/types/index.ts
index 9802d5c8..7c6cbf52 100644
--- a/frontend/src/types/index.ts
+++ b/frontend/src/types/index.ts
@@ -411,6 +411,8 @@ export interface CreateGroupRequest {
image_price_4k?: number | null
claude_code_only?: boolean
fallback_group_id?: number | null
+ // 从指定分组复制账号
+ copy_accounts_from_group_ids?: number[]
}
export interface UpdateGroupRequest {
@@ -429,6 +431,7 @@ export interface UpdateGroupRequest {
image_price_4k?: number | null
claude_code_only?: boolean
fallback_group_id?: number | null
+ copy_accounts_from_group_ids?: number[]
}
// ==================== Account & Proxy Types ====================
diff --git a/frontend/src/views/admin/GroupsView.vue b/frontend/src/views/admin/GroupsView.vue
index 78ef2e48..bf924f53 100644
--- a/frontend/src/views/admin/GroupsView.vue
+++ b/frontend/src/views/admin/GroupsView.vue
@@ -240,9 +240,73 @@
v-model="createForm.platform"
:options="platformOptions"
data-tour="group-form-platform"
+ @change="createForm.copy_accounts_from_group_ids = []"
/>
{{ t('admin.groups.platformHint') }}
+
+
+
+
+
+
+
+
+
+ {{ t('admin.groups.copyAccounts.tooltip') }}
+
+
+
+
+
+
+
+
+
+ {{ copyAccountsGroupOptions.find(o => o.value === groupId)?.label || `#${groupId}` }}
+
+
+
+
+
+
{{ t('admin.groups.copyAccounts.hint') }}
+
+
+
+
+
+
+
+
+
+
+ {{ t('admin.groups.copyAccounts.tooltipEdit') }}
+
+
+
+
+
+
+
+
+
+ {{ copyAccountsGroupOptionsForEdit.find(o => o.value === groupId)?.label || `#${groupId}` }}
+
+
+
+
+
+
{{ t('admin.groups.copyAccounts.hintEdit') }}
+
{
return options
})
+// 复制账号的源分组选项(创建时)- 仅包含相同平台且有账号的分组
+const copyAccountsGroupOptions = computed(() => {
+ const eligibleGroups = groups.value.filter(
+ (g) => g.platform === createForm.platform && (g.account_count || 0) > 0
+ )
+ return eligibleGroups.map((g) => ({
+ value: g.id,
+ label: `${g.name} (${g.account_count || 0} 个账号)`
+ }))
+})
+
+// 复制账号的源分组选项(编辑时)- 仅包含相同平台且有账号的分组,排除自身
+const copyAccountsGroupOptionsForEdit = computed(() => {
+ const currentId = editingGroup.value?.id
+ const eligibleGroups = groups.value.filter(
+ (g) => g.platform === editForm.platform && (g.account_count || 0) > 0 && g.id !== currentId
+ )
+ return eligibleGroups.map((g) => ({
+ value: g.id,
+ label: `${g.name} (${g.account_count || 0} 个账号)`
+ }))
+})
+
const groups = ref
([])
const loading = ref(false)
const searchQuery = ref('')
@@ -1244,7 +1394,9 @@ const createForm = reactive({
claude_code_only: false,
fallback_group_id: null as number | null,
// 模型路由开关
- model_routing_enabled: false
+ model_routing_enabled: false,
+ // 从分组复制账号
+ copy_accounts_from_group_ids: [] as number[]
})
// 简单账号类型(用于模型路由选择)
@@ -1415,7 +1567,9 @@ const editForm = reactive({
claude_code_only: false,
fallback_group_id: null as number | null,
// 模型路由开关
- model_routing_enabled: false
+ model_routing_enabled: false,
+ // 从分组复制账号
+ copy_accounts_from_group_ids: [] as number[]
})
// 根据分组类型返回不同的删除确认消息
@@ -1497,6 +1651,7 @@ const closeCreateModal = () => {
createForm.image_price_4k = null
createForm.claude_code_only = false
createForm.fallback_group_id = null
+ createForm.copy_accounts_from_group_ids = []
createModelRoutingRules.value = []
}
@@ -1547,6 +1702,7 @@ const handleEdit = async (group: AdminGroup) => {
editForm.claude_code_only = group.claude_code_only || false
editForm.fallback_group_id = group.fallback_group_id
editForm.model_routing_enabled = group.model_routing_enabled || false
+ editForm.copy_accounts_from_group_ids = [] // 复制账号字段每次编辑时重置为空
// 加载模型路由规则(异步加载账号名称)
editModelRoutingRules.value = await convertApiFormatToRoutingRules(group.model_routing)
showEditModal.value = true
@@ -1556,6 +1712,7 @@ const closeEditModal = () => {
showEditModal.value = false
editingGroup.value = null
editModelRoutingRules.value = []
+ editForm.copy_accounts_from_group_ids = []
}
const handleUpdateGroup = async () => {
From ce1d2904c7a049ceba8717bd43954addc95b90ad Mon Sep 17 00:00:00 2001
From: liuxiongfeng
Date: Mon, 2 Feb 2026 22:01:41 +0800
Subject: [PATCH 81/99] =?UTF-8?q?test:=20=E4=B8=BA=E6=B5=8B=E8=AF=95=20stu?=
=?UTF-8?q?b=20=E6=B7=BB=E5=8A=A0=E7=BC=BA=E5=A4=B1=E7=9A=84=20GroupReposi?=
=?UTF-8?q?tory=20=E6=8E=A5=E5=8F=A3=E6=96=B9=E6=B3=95?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
新增 BindAccountsToGroup 和 GetAccountIDsByGroupIDs 方法的 stub 实现,
确保测试文件中的 mock 类型满足 GroupRepository 接口要求。
---
backend/internal/repository/group_repo.go | 2 +-
backend/internal/server/api_contract_test.go | 8 ++++++++
.../service/admin_service_delete_test.go | 8 ++++++++
.../internal/service/admin_service_group_test.go | 16 ++++++++++++++++
.../service/gateway_multiplatform_test.go | 8 ++++++++
.../service/gemini_multiplatform_test.go | 8 ++++++++
6 files changed, 49 insertions(+), 1 deletion(-)
diff --git a/backend/internal/repository/group_repo.go b/backend/internal/repository/group_repo.go
index 002e07da..a5b0512d 100644
--- a/backend/internal/repository/group_repo.go
+++ b/backend/internal/repository/group_repo.go
@@ -440,7 +440,7 @@ func (r *groupRepository) GetAccountIDsByGroupIDs(ctx context.Context, groupIDs
if err != nil {
return nil, err
}
- defer rows.Close()
+ defer func() { _ = rows.Close() }()
var accountIDs []int64
for rows.Next() {
diff --git a/backend/internal/server/api_contract_test.go b/backend/internal/server/api_contract_test.go
index 22e6213e..6adab853 100644
--- a/backend/internal/server/api_contract_test.go
+++ b/backend/internal/server/api_contract_test.go
@@ -880,6 +880,14 @@ func (stubGroupRepo) DeleteAccountGroupsByGroupID(ctx context.Context, groupID i
return 0, errors.New("not implemented")
}
+func (stubGroupRepo) BindAccountsToGroup(ctx context.Context, groupID int64, accountIDs []int64) error {
+ return errors.New("not implemented")
+}
+
+func (stubGroupRepo) GetAccountIDsByGroupIDs(ctx context.Context, groupIDs []int64) ([]int64, error) {
+ return nil, errors.New("not implemented")
+}
+
type stubAccountRepo struct {
bulkUpdateIDs []int64
}
diff --git a/backend/internal/service/admin_service_delete_test.go b/backend/internal/service/admin_service_delete_test.go
index 6472ccbb..923d33ab 100644
--- a/backend/internal/service/admin_service_delete_test.go
+++ b/backend/internal/service/admin_service_delete_test.go
@@ -164,6 +164,14 @@ func (s *groupRepoStub) DeleteAccountGroupsByGroupID(ctx context.Context, groupI
panic("unexpected DeleteAccountGroupsByGroupID call")
}
+func (s *groupRepoStub) BindAccountsToGroup(ctx context.Context, groupID int64, accountIDs []int64) error {
+ panic("unexpected BindAccountsToGroup call")
+}
+
+func (s *groupRepoStub) GetAccountIDsByGroupIDs(ctx context.Context, groupIDs []int64) ([]int64, error) {
+ panic("unexpected GetAccountIDsByGroupIDs call")
+}
+
type proxyRepoStub struct {
deleteErr error
countErr error
diff --git a/backend/internal/service/admin_service_group_test.go b/backend/internal/service/admin_service_group_test.go
index e0574e2e..1daee89f 100644
--- a/backend/internal/service/admin_service_group_test.go
+++ b/backend/internal/service/admin_service_group_test.go
@@ -108,6 +108,14 @@ func (s *groupRepoStubForAdmin) DeleteAccountGroupsByGroupID(_ context.Context,
panic("unexpected DeleteAccountGroupsByGroupID call")
}
+func (s *groupRepoStubForAdmin) BindAccountsToGroup(_ context.Context, _ int64, _ []int64) error {
+ panic("unexpected BindAccountsToGroup call")
+}
+
+func (s *groupRepoStubForAdmin) GetAccountIDsByGroupIDs(_ context.Context, _ []int64) ([]int64, error) {
+ panic("unexpected GetAccountIDsByGroupIDs call")
+}
+
// TestAdminService_CreateGroup_WithImagePricing 测试创建分组时 ImagePrice 字段正确传递
func TestAdminService_CreateGroup_WithImagePricing(t *testing.T) {
repo := &groupRepoStubForAdmin{}
@@ -378,3 +386,11 @@ func (s *groupRepoStubForFallbackCycle) GetAccountCount(_ context.Context, _ int
func (s *groupRepoStubForFallbackCycle) DeleteAccountGroupsByGroupID(_ context.Context, _ int64) (int64, error) {
panic("unexpected DeleteAccountGroupsByGroupID call")
}
+
+func (s *groupRepoStubForFallbackCycle) BindAccountsToGroup(_ context.Context, _ int64, _ []int64) error {
+ panic("unexpected BindAccountsToGroup call")
+}
+
+func (s *groupRepoStubForFallbackCycle) GetAccountIDsByGroupIDs(_ context.Context, _ []int64) ([]int64, error) {
+ panic("unexpected GetAccountIDsByGroupIDs call")
+}
diff --git a/backend/internal/service/gateway_multiplatform_test.go b/backend/internal/service/gateway_multiplatform_test.go
index 26eb24e4..4bfa23d1 100644
--- a/backend/internal/service/gateway_multiplatform_test.go
+++ b/backend/internal/service/gateway_multiplatform_test.go
@@ -266,6 +266,14 @@ func (m *mockGroupRepoForGateway) DeleteAccountGroupsByGroupID(ctx context.Conte
return 0, nil
}
+func (m *mockGroupRepoForGateway) BindAccountsToGroup(ctx context.Context, groupID int64, accountIDs []int64) error {
+ return nil
+}
+
+func (m *mockGroupRepoForGateway) GetAccountIDsByGroupIDs(ctx context.Context, groupIDs []int64) ([]int64, error) {
+ return nil, nil
+}
+
func ptr[T any](v T) *T {
return &v
}
diff --git a/backend/internal/service/gemini_multiplatform_test.go b/backend/internal/service/gemini_multiplatform_test.go
index c63a020c..e7ed80fd 100644
--- a/backend/internal/service/gemini_multiplatform_test.go
+++ b/backend/internal/service/gemini_multiplatform_test.go
@@ -218,6 +218,14 @@ func (m *mockGroupRepoForGemini) DeleteAccountGroupsByGroupID(ctx context.Contex
return 0, nil
}
+func (m *mockGroupRepoForGemini) BindAccountsToGroup(ctx context.Context, groupID int64, accountIDs []int64) error {
+ return nil
+}
+
+func (m *mockGroupRepoForGemini) GetAccountIDsByGroupIDs(ctx context.Context, groupIDs []int64) ([]int64, error) {
+ return nil, nil
+}
+
var _ GroupRepository = (*mockGroupRepoForGemini)(nil)
// mockGatewayCacheForGemini Gemini 测试用的 cache mock
From 606e29d390371e322c29d720f27564ef33d5ddba Mon Sep 17 00:00:00 2001
From: bayma888
Date: Tue, 3 Feb 2026 00:16:10 +0800
Subject: [PATCH 82/99] feat(admin): add user balance/concurrency history modal
- Add new API endpoint GET /admin/users/:id/balance-history with pagination and type filter
- Add SumPositiveBalanceByUser for calculating total recharged amount
- Create UserBalanceHistoryModal component with:
- User info header (email, username, created_at, current balance, notes, total recharged)
- Type filter dropdown (all/balance/admin_balance/concurrency/admin_concurrency/subscription)
- Quick deposit/withdraw buttons
- Paginated history list with icons and colored values
- Add instant tooltip on balance column for better UX
- Add z-index prop to BaseDialog for modal stacking control
- Update i18n translations (zh/en)
---
.../internal/handler/admin/user_handler.go | 41 +++
.../internal/repository/redeem_code_repo.go | 51 +++
backend/internal/server/routes/admin.go | 1 +
backend/internal/service/admin_service.go | 19 ++
backend/internal/service/redeem_service.go | 5 +
frontend/src/api/admin/index.ts | 3 +
frontend/src/api/admin/users.ts | 50 ++-
.../admin/user/UserBalanceHistoryModal.vue | 320 ++++++++++++++++++
frontend/src/components/common/BaseDialog.vue | 10 +-
frontend/src/i18n/locales/en.ts | 14 +
frontend/src/i18n/locales/zh.ts | 14 +
frontend/src/views/admin/UsersView.vue | 64 +++-
12 files changed, 588 insertions(+), 4 deletions(-)
create mode 100644 frontend/src/components/admin/user/UserBalanceHistoryModal.vue
diff --git a/backend/internal/handler/admin/user_handler.go b/backend/internal/handler/admin/user_handler.go
index 9a5a691f..ac76689d 100644
--- a/backend/internal/handler/admin/user_handler.go
+++ b/backend/internal/handler/admin/user_handler.go
@@ -277,3 +277,44 @@ func (h *UserHandler) GetUserUsage(c *gin.Context) {
response.Success(c, stats)
}
+
+// GetBalanceHistory handles getting user's balance/concurrency change history
+// GET /api/v1/admin/users/:id/balance-history
+// Query params:
+// - type: filter by record type (balance, admin_balance, concurrency, admin_concurrency, subscription)
+func (h *UserHandler) GetBalanceHistory(c *gin.Context) {
+ userID, err := strconv.ParseInt(c.Param("id"), 10, 64)
+ if err != nil {
+ response.BadRequest(c, "Invalid user ID")
+ return
+ }
+
+ page, pageSize := response.ParsePagination(c)
+ codeType := c.Query("type")
+
+ codes, total, totalRecharged, err := h.adminService.GetUserBalanceHistory(c.Request.Context(), userID, page, pageSize, codeType)
+ if err != nil {
+ response.ErrorFrom(c, err)
+ return
+ }
+
+ // Convert to admin DTO (includes notes field for admin visibility)
+ out := make([]dto.AdminRedeemCode, 0, len(codes))
+ for i := range codes {
+ out = append(out, *dto.RedeemCodeFromServiceAdmin(&codes[i]))
+ }
+
+ // Custom response with total_recharged alongside pagination
+ pages := int((total + int64(pageSize) - 1) / int64(pageSize))
+ if pages < 1 {
+ pages = 1
+ }
+ response.Success(c, gin.H{
+ "items": out,
+ "total": total,
+ "page": page,
+ "page_size": pageSize,
+ "pages": pages,
+ "total_recharged": totalRecharged,
+ })
+}
diff --git a/backend/internal/repository/redeem_code_repo.go b/backend/internal/repository/redeem_code_repo.go
index ee8a01b5..a3a048c3 100644
--- a/backend/internal/repository/redeem_code_repo.go
+++ b/backend/internal/repository/redeem_code_repo.go
@@ -202,6 +202,57 @@ func (r *redeemCodeRepository) ListByUser(ctx context.Context, userID int64, lim
return redeemCodeEntitiesToService(codes), nil
}
+// ListByUserPaginated returns paginated balance/concurrency history for a user.
+// Supports optional type filter (e.g. "balance", "admin_balance", "concurrency", "admin_concurrency", "subscription").
+func (r *redeemCodeRepository) ListByUserPaginated(ctx context.Context, userID int64, params pagination.PaginationParams, codeType string) ([]service.RedeemCode, *pagination.PaginationResult, error) {
+ q := r.client.RedeemCode.Query().
+ Where(redeemcode.UsedByEQ(userID))
+
+ // Optional type filter
+ if codeType != "" {
+ q = q.Where(redeemcode.TypeEQ(codeType))
+ }
+
+ total, err := q.Count(ctx)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ codes, err := q.
+ WithGroup().
+ Offset(params.Offset()).
+ Limit(params.Limit()).
+ Order(dbent.Desc(redeemcode.FieldUsedAt)).
+ All(ctx)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ return redeemCodeEntitiesToService(codes), paginationResultFromTotal(int64(total), params), nil
+}
+
+// SumPositiveBalanceByUser returns total recharged amount (sum of value > 0 where type is balance/admin_balance).
+func (r *redeemCodeRepository) SumPositiveBalanceByUser(ctx context.Context, userID int64) (float64, error) {
+ var result []struct {
+ Sum float64 `json:"sum"`
+ }
+ err := r.client.RedeemCode.Query().
+ Where(
+ redeemcode.UsedByEQ(userID),
+ redeemcode.ValueGT(0),
+ redeemcode.TypeIn("balance", "admin_balance"),
+ ).
+ Aggregate(dbent.As(dbent.Sum(redeemcode.FieldValue), "sum")).
+ Scan(ctx, &result)
+ if err != nil {
+ return 0, err
+ }
+ if len(result) == 0 {
+ return 0, nil
+ }
+ return result[0].Sum, nil
+}
+
func redeemCodeEntityToService(m *dbent.RedeemCode) *service.RedeemCode {
if m == nil {
return nil
diff --git a/backend/internal/server/routes/admin.go b/backend/internal/server/routes/admin.go
index 050e724d..3ade973a 100644
--- a/backend/internal/server/routes/admin.go
+++ b/backend/internal/server/routes/admin.go
@@ -172,6 +172,7 @@ func registerUserManagementRoutes(admin *gin.RouterGroup, h *handler.Handlers) {
users.POST("/:id/balance", h.Admin.User.UpdateBalance)
users.GET("/:id/api-keys", h.Admin.User.GetUserAPIKeys)
users.GET("/:id/usage", h.Admin.User.GetUserUsage)
+ users.GET("/:id/balance-history", h.Admin.User.GetBalanceHistory)
// User attribute values
users.GET("/:id/attributes", h.Admin.UserAttribute.GetUserAttributes)
diff --git a/backend/internal/service/admin_service.go b/backend/internal/service/admin_service.go
index 0afa0716..63958e5b 100644
--- a/backend/internal/service/admin_service.go
+++ b/backend/internal/service/admin_service.go
@@ -22,6 +22,10 @@ type AdminService interface {
UpdateUserBalance(ctx context.Context, userID int64, balance float64, operation string, notes string) (*User, error)
GetUserAPIKeys(ctx context.Context, userID int64, page, pageSize int) ([]APIKey, int64, error)
GetUserUsageStats(ctx context.Context, userID int64, period string) (any, error)
+ // GetUserBalanceHistory returns paginated balance/concurrency change records for a user.
+ // codeType is optional - pass empty string to return all types.
+ // Also returns totalRecharged (sum of all positive balance top-ups).
+ GetUserBalanceHistory(ctx context.Context, userID int64, page, pageSize int, codeType string) ([]RedeemCode, int64, float64, error)
// Group management
ListGroups(ctx context.Context, page, pageSize int, platform, status, search string, isExclusive *bool) ([]Group, int64, error)
@@ -522,6 +526,21 @@ func (s *adminServiceImpl) GetUserUsageStats(ctx context.Context, userID int64,
}, nil
}
+// GetUserBalanceHistory returns paginated balance/concurrency change records for a user.
+func (s *adminServiceImpl) GetUserBalanceHistory(ctx context.Context, userID int64, page, pageSize int, codeType string) ([]RedeemCode, int64, float64, error) {
+ params := pagination.PaginationParams{Page: page, PageSize: pageSize}
+ codes, result, err := s.redeemCodeRepo.ListByUserPaginated(ctx, userID, params, codeType)
+ if err != nil {
+ return nil, 0, 0, err
+ }
+ // Aggregate total recharged amount (only once, regardless of type filter)
+ totalRecharged, err := s.redeemCodeRepo.SumPositiveBalanceByUser(ctx, userID)
+ if err != nil {
+ return nil, 0, 0, err
+ }
+ return codes, result.Total, totalRecharged, nil
+}
+
// Group management implementations
func (s *adminServiceImpl) ListGroups(ctx context.Context, page, pageSize int, platform, status, search string, isExclusive *bool) ([]Group, int64, error) {
params := pagination.PaginationParams{Page: page, PageSize: pageSize}
diff --git a/backend/internal/service/redeem_service.go b/backend/internal/service/redeem_service.go
index ff52dc47..dd99d4c6 100644
--- a/backend/internal/service/redeem_service.go
+++ b/backend/internal/service/redeem_service.go
@@ -49,6 +49,11 @@ type RedeemCodeRepository interface {
List(ctx context.Context, params pagination.PaginationParams) ([]RedeemCode, *pagination.PaginationResult, error)
ListWithFilters(ctx context.Context, params pagination.PaginationParams, codeType, status, search string) ([]RedeemCode, *pagination.PaginationResult, error)
ListByUser(ctx context.Context, userID int64, limit int) ([]RedeemCode, error)
+ // ListByUserPaginated returns paginated balance/concurrency history for a specific user.
+ // codeType filter is optional - pass empty string to return all types.
+ ListByUserPaginated(ctx context.Context, userID int64, params pagination.PaginationParams, codeType string) ([]RedeemCode, *pagination.PaginationResult, error)
+ // SumPositiveBalanceByUser returns the total recharged amount (sum of positive balance values) for a user.
+ SumPositiveBalanceByUser(ctx context.Context, userID int64) (float64, error)
}
// GenerateCodesRequest 生成兑换码请求
diff --git a/frontend/src/api/admin/index.ts b/frontend/src/api/admin/index.ts
index e86f6348..b61858f7 100644
--- a/frontend/src/api/admin/index.ts
+++ b/frontend/src/api/admin/index.ts
@@ -59,3 +59,6 @@ export {
}
export default adminAPI
+
+// Re-export types used by components
+export type { BalanceHistoryItem } from './users'
diff --git a/frontend/src/api/admin/users.ts b/frontend/src/api/admin/users.ts
index 734e3ac7..287aef96 100644
--- a/frontend/src/api/admin/users.ts
+++ b/frontend/src/api/admin/users.ts
@@ -174,6 +174,53 @@ export async function getUserUsageStats(
return data
}
+/**
+ * Balance history item returned from the API
+ */
+export interface BalanceHistoryItem {
+ id: number
+ code: string
+ type: string
+ value: number
+ status: string
+ used_by: number | null
+ used_at: string | null
+ created_at: string
+ group_id: number | null
+ validity_days: number
+ notes: string
+ user?: { id: number; email: string } | null
+ group?: { id: number; name: string } | null
+}
+
+// Balance history response extends pagination with total_recharged summary
+export interface BalanceHistoryResponse extends PaginatedResponse {
+ total_recharged: number
+}
+
+/**
+ * Get user's balance/concurrency change history
+ * @param id - User ID
+ * @param page - Page number
+ * @param pageSize - Items per page
+ * @param type - Optional type filter (balance, admin_balance, concurrency, admin_concurrency, subscription)
+ * @returns Paginated balance history with total_recharged
+ */
+export async function getUserBalanceHistory(
+ id: number,
+ page: number = 1,
+ pageSize: number = 20,
+ type?: string
+): Promise {
+ const params: Record = { page, page_size: pageSize }
+ if (type) params.type = type
+ const { data } = await apiClient.get(
+ `/admin/users/${id}/balance-history`,
+ { params }
+ )
+ return data
+}
+
export const usersAPI = {
list,
getById,
@@ -184,7 +231,8 @@ export const usersAPI = {
updateConcurrency,
toggleStatus,
getUserApiKeys,
- getUserUsageStats
+ getUserUsageStats,
+ getUserBalanceHistory
}
export default usersAPI
diff --git a/frontend/src/components/admin/user/UserBalanceHistoryModal.vue b/frontend/src/components/admin/user/UserBalanceHistoryModal.vue
new file mode 100644
index 00000000..e7dfdb7d
--- /dev/null
+++ b/frontend/src/components/admin/user/UserBalanceHistoryModal.vue
@@ -0,0 +1,320 @@
+
+
+
+
+
+
+
+
+
+ {{ user.email.charAt(0).toUpperCase() }}
+
+
+
+
+
{{ user.email }}
+
+ {{ user.username }}
+
+
+
+ {{ t('admin.users.createdAt') }}: {{ formatDateTime(user.created_at) }}
+
+
+
+
+
{{ t('admin.users.currentBalance') }}
+
+ ${{ user.balance?.toFixed(2) || '0.00' }}
+
+
+
+
+
+
+ {{ t('admin.users.notes') }}: {{ user.notes }}
+
+
+
+ {{ t('admin.users.totalRecharged') }}: ${{ totalRecharged.toFixed(2) }}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
{{ t('admin.users.noBalanceHistory') }}
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {{ getItemTitle(item) }}
+
+
+
+ {{ item.notes.length > 60 ? item.notes.substring(0, 55) + '...' : item.notes }}
+
+
+ {{ formatDateTime(item.used_at || item.created_at) }}
+
+
+
+
+
+
+ {{ formatValue(item) }}
+
+
+ {{ t('redeem.adminAdjustment') }}
+
+
+ {{ item.code.slice(0, 8) }}...
+
+
+
+
+
+
+
+
+
+
+ {{ currentPage }} / {{ totalPages }}
+
+
+
+
+
+
+
+
diff --git a/frontend/src/components/common/BaseDialog.vue b/frontend/src/components/common/BaseDialog.vue
index 3d38b568..93e4ba36 100644
--- a/frontend/src/components/common/BaseDialog.vue
+++ b/frontend/src/components/common/BaseDialog.vue
@@ -4,6 +4,7 @@
(), {
width: 'normal',
closeOnEscape: true,
- closeOnClickOutside: false
+ closeOnClickOutside: false,
+ zIndex: 50
})
const emit = defineEmits
()
+// Custom z-index style (overrides the default z-50 from CSS)
+const zIndexStyle = computed(() => {
+ return props.zIndex !== 50 ? { zIndex: props.zIndex } : undefined
+})
+
const widthClasses = computed(() => {
// Width guidance: narrow=confirm/short prompts, normal=standard forms,
// wide=multi-section forms or rich content, extra-wide=analytics/tables,
diff --git a/frontend/src/i18n/locales/en.ts b/frontend/src/i18n/locales/en.ts
index dc93d37c..f96472d5 100644
--- a/frontend/src/i18n/locales/en.ts
+++ b/frontend/src/i18n/locales/en.ts
@@ -832,6 +832,20 @@ export default {
failedToDeposit: 'Failed to deposit',
failedToWithdraw: 'Failed to withdraw',
useDepositWithdrawButtons: 'Please use deposit/withdraw buttons to adjust balance',
+ // Balance History
+ balanceHistory: 'Recharge History',
+ balanceHistoryTip: 'Click to open recharge history',
+ balanceHistoryTitle: 'User Recharge & Concurrency History',
+ noBalanceHistory: 'No records found for this user',
+ allTypes: 'All Types',
+ typeBalance: 'Balance (Redeem)',
+ typeAdminBalance: 'Balance (Admin)',
+ typeConcurrency: 'Concurrency (Redeem)',
+ typeAdminConcurrency: 'Concurrency (Admin)',
+ typeSubscription: 'Subscription',
+ failedToLoadBalanceHistory: 'Failed to load balance history',
+ createdAt: 'Created',
+ totalRecharged: 'Total Recharged',
roles: {
admin: 'Admin',
user: 'User'
diff --git a/frontend/src/i18n/locales/zh.ts b/frontend/src/i18n/locales/zh.ts
index 4b6a9be6..4bb9ee49 100644
--- a/frontend/src/i18n/locales/zh.ts
+++ b/frontend/src/i18n/locales/zh.ts
@@ -883,6 +883,20 @@ export default {
failedToDeposit: '充值失败',
failedToWithdraw: '退款失败',
useDepositWithdrawButtons: '请使用充值/退款按钮调整余额',
+ // 余额变动记录
+ balanceHistory: '充值记录',
+ balanceHistoryTip: '点击查看充值记录',
+ balanceHistoryTitle: '用户充值和并发变动记录',
+ noBalanceHistory: '暂无变动记录',
+ allTypes: '全部类型',
+ typeBalance: '余额(兑换码)',
+ typeAdminBalance: '余额(管理员调整)',
+ typeConcurrency: '并发(兑换码)',
+ typeAdminConcurrency: '并发(管理员调整)',
+ typeSubscription: '订阅',
+ failedToLoadBalanceHistory: '加载余额记录失败',
+ createdAt: '创建时间',
+ totalRecharged: '总充值',
// Settings Dropdowns
filterSettings: '筛选设置',
columnSettings: '列设置',
diff --git a/frontend/src/views/admin/UsersView.vue b/frontend/src/views/admin/UsersView.vue
index 2a73a977..7ba5462e 100644
--- a/frontend/src/views/admin/UsersView.vue
+++ b/frontend/src/views/admin/UsersView.vue
@@ -300,8 +300,29 @@
-
- ${{ value.toFixed(2) }}
+
+
+
+
+
+
+ {{ t('admin.users.balanceHistoryTip') }}
+
+
+
+
+
@@ -456,6 +477,15 @@
{{ t('admin.users.withdraw') }}
+
+
+
@@ -479,6 +509,7 @@
+
@@ -509,6 +540,7 @@ import UserEditModal from '@/components/admin/user/UserEditModal.vue'
import UserApiKeysModal from '@/components/admin/user/UserApiKeysModal.vue'
import UserAllowedGroupsModal from '@/components/admin/user/UserAllowedGroupsModal.vue'
import UserBalanceModal from '@/components/admin/user/UserBalanceModal.vue'
+import UserBalanceHistoryModal from '@/components/admin/user/UserBalanceHistoryModal.vue'
const appStore = useAppStore()
@@ -828,6 +860,10 @@ const showBalanceModal = ref(false)
const balanceUser = ref(null)
const balanceOperation = ref<'add' | 'subtract'>('add')
+// Balance History modal state
+const showBalanceHistoryModal = ref(false)
+const balanceHistoryUser = ref(null)
+
// 计算剩余天数
const getDaysRemaining = (expiresAt: string): number => {
const now = new Date()
@@ -1078,6 +1114,30 @@ const closeBalanceModal = () => {
balanceUser.value = null
}
+const handleBalanceHistory = (user: AdminUser) => {
+ balanceHistoryUser.value = user
+ showBalanceHistoryModal.value = true
+}
+
+const closeBalanceHistoryModal = () => {
+ showBalanceHistoryModal.value = false
+ balanceHistoryUser.value = null
+}
+
+// Handle deposit from balance history modal
+const handleDepositFromHistory = () => {
+ if (balanceHistoryUser.value) {
+ handleDeposit(balanceHistoryUser.value)
+ }
+}
+
+// Handle withdraw from balance history modal
+const handleWithdrawFromHistory = () => {
+ if (balanceHistoryUser.value) {
+ handleWithdraw(balanceHistoryUser.value)
+ }
+}
+
// 滚动时关闭菜单
const handleScroll = () => {
closeActionMenu()
From 03e94f9f538d325887c755d956a46931b71d2030 Mon Sep 17 00:00:00 2001
From: ianshaw
Date: Tue, 3 Feb 2026 06:01:29 +0800
Subject: [PATCH 83/99] =?UTF-8?q?fix(gemini):=20=E4=B8=BA=20Gemini=20?=
=?UTF-8?q?=E5=B7=A5=E5=85=B7=E8=B0=83=E7=94=A8=E6=B7=BB=E5=8A=A0=20though?=
=?UTF-8?q?tSignature=20=E9=81=BF=E5=85=8D=20INVALID=5FARGUMENT=20?=
=?UTF-8?q?=E9=94=99=E8=AF=AF?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.../service/gemini_messages_compat_service.go | 68 ++++++++++++++++
.../gemini_messages_compat_service_test.go | 77 +++++++++++++++++++
2 files changed, 145 insertions(+)
diff --git a/backend/internal/service/gemini_messages_compat_service.go b/backend/internal/service/gemini_messages_compat_service.go
index cbbf5fcf..2d2e86d5 100644
--- a/backend/internal/service/gemini_messages_compat_service.go
+++ b/backend/internal/service/gemini_messages_compat_service.go
@@ -36,6 +36,11 @@ const (
geminiRetryMaxDelay = 16 * time.Second
)
+// Gemini tool calling now requires `thoughtSignature` in parts that include `functionCall`.
+// Many clients don't send it; we inject a known dummy signature to satisfy the validator.
+// Ref: https://ai.google.dev/gemini-api/docs/thought-signatures
+const geminiDummyThoughtSignature = "skip_thought_signature_validator"
+
type GeminiMessagesCompatService struct {
accountRepo AccountRepository
groupRepo GroupRepository
@@ -528,6 +533,7 @@ func (s *GeminiMessagesCompatService) Forward(ctx context.Context, c *gin.Contex
if err != nil {
return nil, s.writeClaudeError(c, http.StatusBadRequest, "invalid_request_error", err.Error())
}
+ geminiReq = ensureGeminiFunctionCallThoughtSignatures(geminiReq)
originalClaudeBody := body
proxyURL := ""
@@ -978,6 +984,10 @@ func (s *GeminiMessagesCompatService) ForwardNative(ctx context.Context, c *gin.
return nil, s.writeGoogleError(c, http.StatusNotFound, "Unsupported action: "+action)
}
+ // Some Gemini upstreams validate tool call parts strictly; ensure any `functionCall` part includes a
+ // `thoughtSignature` to avoid frequent INVALID_ARGUMENT 400s.
+ body = ensureGeminiFunctionCallThoughtSignatures(body)
+
mappedModel := originalModel
if account.Type == AccountTypeAPIKey {
mappedModel = account.GetMappedModel(originalModel)
@@ -2657,6 +2667,58 @@ func nextGeminiDailyResetUnix() *int64 {
return &ts
}
+func ensureGeminiFunctionCallThoughtSignatures(body []byte) []byte {
+ // Fast path: only run when functionCall is present.
+ if !bytes.Contains(body, []byte(`"functionCall"`)) {
+ return body
+ }
+
+ var payload map[string]any
+ if err := json.Unmarshal(body, &payload); err != nil {
+ return body
+ }
+
+ contentsAny, ok := payload["contents"].([]any)
+ if !ok || len(contentsAny) == 0 {
+ return body
+ }
+
+ modified := false
+ for _, c := range contentsAny {
+ cm, ok := c.(map[string]any)
+ if !ok {
+ continue
+ }
+ partsAny, ok := cm["parts"].([]any)
+ if !ok || len(partsAny) == 0 {
+ continue
+ }
+ for _, p := range partsAny {
+ pm, ok := p.(map[string]any)
+ if !ok || pm == nil {
+ continue
+ }
+ if fc, ok := pm["functionCall"].(map[string]any); !ok || fc == nil {
+ continue
+ }
+ ts, _ := pm["thoughtSignature"].(string)
+ if strings.TrimSpace(ts) == "" {
+ pm["thoughtSignature"] = geminiDummyThoughtSignature
+ modified = true
+ }
+ }
+ }
+
+ if !modified {
+ return body
+ }
+ b, err := json.Marshal(payload)
+ if err != nil {
+ return body
+ }
+ return b
+}
+
func extractGeminiFinishReason(geminiResp map[string]any) string {
if candidates, ok := geminiResp["candidates"].([]any); ok && len(candidates) > 0 {
if cand, ok := candidates[0].(map[string]any); ok {
@@ -2856,7 +2918,13 @@ func convertClaudeMessagesToGeminiContents(messages any, toolUseIDToName map[str
if strings.TrimSpace(id) != "" && strings.TrimSpace(name) != "" {
toolUseIDToName[id] = name
}
+ signature, _ := bm["signature"].(string)
+ signature = strings.TrimSpace(signature)
+ if signature == "" {
+ signature = geminiDummyThoughtSignature
+ }
parts = append(parts, map[string]any{
+ "thoughtSignature": signature,
"functionCall": map[string]any{
"name": name,
"args": bm["input"],
diff --git a/backend/internal/service/gemini_messages_compat_service_test.go b/backend/internal/service/gemini_messages_compat_service_test.go
index d49f2eb3..f31b40ec 100644
--- a/backend/internal/service/gemini_messages_compat_service_test.go
+++ b/backend/internal/service/gemini_messages_compat_service_test.go
@@ -1,6 +1,8 @@
package service
import (
+ "encoding/json"
+ "strings"
"testing"
)
@@ -126,3 +128,78 @@ func TestConvertClaudeToolsToGeminiTools_CustomType(t *testing.T) {
})
}
}
+
+func TestConvertClaudeMessagesToGeminiGenerateContent_AddsThoughtSignatureForToolUse(t *testing.T) {
+ claudeReq := map[string]any{
+ "model": "claude-haiku-4-5-20251001",
+ "max_tokens": 10,
+ "messages": []any{
+ map[string]any{
+ "role": "user",
+ "content": []any{
+ map[string]any{"type": "text", "text": "hi"},
+ },
+ },
+ map[string]any{
+ "role": "assistant",
+ "content": []any{
+ map[string]any{"type": "text", "text": "ok"},
+ map[string]any{
+ "type": "tool_use",
+ "id": "toolu_123",
+ "name": "default_api:write_file",
+ "input": map[string]any{"path": "a.txt", "content": "x"},
+ // no signature on purpose
+ },
+ },
+ },
+ },
+ "tools": []any{
+ map[string]any{
+ "name": "default_api:write_file",
+ "description": "write file",
+ "input_schema": map[string]any{
+ "type": "object",
+ "properties": map[string]any{"path": map[string]any{"type": "string"}},
+ },
+ },
+ },
+ }
+ b, _ := json.Marshal(claudeReq)
+
+ out, err := convertClaudeMessagesToGeminiGenerateContent(b)
+ if err != nil {
+ t.Fatalf("convert failed: %v", err)
+ }
+ s := string(out)
+ if !strings.Contains(s, "\"functionCall\"") {
+ t.Fatalf("expected functionCall in output, got: %s", s)
+ }
+ if !strings.Contains(s, "\"thoughtSignature\":\""+geminiDummyThoughtSignature+"\"") {
+ t.Fatalf("expected injected thoughtSignature %q, got: %s", geminiDummyThoughtSignature, s)
+ }
+}
+
+func TestEnsureGeminiFunctionCallThoughtSignatures_InsertsWhenMissing(t *testing.T) {
+ geminiReq := map[string]any{
+ "contents": []any{
+ map[string]any{
+ "role": "user",
+ "parts": []any{
+ map[string]any{
+ "functionCall": map[string]any{
+ "name": "default_api:write_file",
+ "args": map[string]any{"path": "a.txt"},
+ },
+ },
+ },
+ },
+ },
+ }
+ b, _ := json.Marshal(geminiReq)
+ out := ensureGeminiFunctionCallThoughtSignatures(b)
+ s := string(out)
+ if !strings.Contains(s, "\"thoughtSignature\":\""+geminiDummyThoughtSignature+"\"") {
+ t.Fatalf("expected injected thoughtSignature %q, got: %s", geminiDummyThoughtSignature, s)
+ }
+}
From 0ab68aa9fb3f35750c3ddc8267f89f8c615595bc Mon Sep 17 00:00:00 2001
From: shaw
Date: Tue, 3 Feb 2026 11:24:04 +0800
Subject: [PATCH 84/99] =?UTF-8?q?fix(setup):=20=E4=BF=AE=E5=A4=8D=20Redis?=
=?UTF-8?q?=20TLS=20=E9=85=8D=E7=BD=AE=E9=80=89=E9=A1=B9=E4=BD=8D=E7=BD=AE?=
=?UTF-8?q?=E9=94=99=E8=AF=AF?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
- 将 TLS Toggle 从第一步(PostgreSQL)移动到第二步(Redis)
- 添加缺失的 Toggle 组件导入
问题描述:
TLS 配置选项错误地出现在数据库配置步骤中,而不是 Redis 配置步骤
---
frontend/src/views/setup/SetupWizardView.vue | 25 ++++++++++----------
1 file changed, 13 insertions(+), 12 deletions(-)
diff --git a/frontend/src/views/setup/SetupWizardView.vue b/frontend/src/views/setup/SetupWizardView.vue
index 00f437ba..f3c773ca 100644
--- a/frontend/src/views/setup/SetupWizardView.vue
+++ b/frontend/src/views/setup/SetupWizardView.vue
@@ -91,18 +91,6 @@
-
-
-
- {{ t("setup.redis.enableTls") }}
-
-
- {{ t("setup.redis.enableTlsHint") }}
-
-
-
-
-
@@ -237,6 +225,18 @@
+
+
+
+ {{ t("setup.redis.enableTls") }}
+
+
+ {{ t("setup.redis.enableTlsHint") }}
+
+
+
+
+