fix: merge 30 general improvements from release branch

Bug fixes:
- Detached context for GetAccountConcurrencyBatch (prevent all-zero on request cancel)
- Filter soft-deleted users in GetByGroupID
- Stripe CSP policy (allow Stripe.js in script-src and frame-src)
- WebSearch API key validation on save
- RECHARGING status in payment result success check
- Windows test fixes (logger Sync deadlock, config path escaping)

Feature enhancements:
- Webhook multi-instance dispatch (extractOutTradeNo + GetWebhookProvider)
- EasyPay mobile H5 payment (device param + PayURL2)
- SSE error propagation in WebSearch emulation
- AccountStatsCost DTO field for admin usage logs
- Plans sort by sort_order instead of created_at
- UsageMapHook for streaming response usage data
- apicompat Instructions field passthrough
- EffectiveLoadFactor for ops concurrency/metrics
- Usage billing RETURNING balance for notify system
- BulkUpdate mixed channel warning with details
- println to slog migration in auth cache
- Wire ProviderSet cleanup
- CI cache-dependency-path optimization

Frontend:
- Refund eligibility check per provider (canRequestRefund)
- Plan sort_order editing
- Dead code cleanup (simulate_claude_max, client_affinity)
- GroupsView platform switch guard
- channels features_config API type
- UsageView account_stats_cost export
This commit is contained in:
erio
2026-04-14 17:35:27 +08:00
parent f1297a3694
commit 6ac8ccde46
34 changed files with 306 additions and 118 deletions

View File

@@ -17,6 +17,7 @@ jobs:
go-version-file: backend/go.mod
check-latest: false
cache: true
cache-dependency-path: backend/go.sum
- name: Verify Go version
run: |
go version | grep -q 'go1.26.2'
@@ -36,6 +37,7 @@ jobs:
go-version-file: backend/go.mod
check-latest: false
cache: true
cache-dependency-path: backend/go.sum
- name: Verify Go version
run: |
go version | grep -q 'go1.26.2'

View File

@@ -36,19 +36,13 @@ func initializeApplication(buildInfo handler.BuildInfo) (*Application, error) {
// Business layer ProviderSets
repository.ProviderSet,
service.ProviderSet,
payment.ProviderSet,
middleware.ProviderSet,
handler.ProviderSet,
// Server layer ProviderSet
server.ProviderSet,
// Payment providers
payment.ProvideRegistry,
payment.ProvideEncryptionKey,
payment.ProvideDefaultLoadBalancer,
service.ProvidePaymentConfigService,
service.ProvidePaymentOrderExpiryService,
// Privacy client factory for OpenAI training opt-out
providePrivacyClientFactory,

View File

@@ -28,7 +28,7 @@ const (
// DefaultCSPPolicy is the default Content-Security-Policy with nonce support
// __CSP_NONCE__ will be replaced with actual nonce at request time by the SecurityHeaders middleware
const DefaultCSPPolicy = "default-src 'self'; script-src 'self' __CSP_NONCE__ https://challenges.cloudflare.com https://static.cloudflareinsights.com; style-src 'self' 'unsafe-inline' https://fonts.googleapis.com; img-src 'self' data: https:; font-src 'self' data: https://fonts.gstatic.com; connect-src 'self' https:; frame-src https://challenges.cloudflare.com; frame-ancestors 'none'; base-uri 'self'; form-action 'self'"
const DefaultCSPPolicy = "default-src 'self'; script-src 'self' __CSP_NONCE__ https://challenges.cloudflare.com https://static.cloudflareinsights.com https://*.stripe.com; style-src 'self' 'unsafe-inline' https://fonts.googleapis.com; img-src 'self' data: https:; font-src 'self' data: https://fonts.gstatic.com; connect-src 'self' https:; frame-src https://challenges.cloudflare.com https://*.stripe.com; frame-ancestors 'none'; base-uri 'self'; form-action 'self'"
// UMQ用户消息队列模式常量
const (

View File

@@ -233,12 +233,13 @@ func TestLoadForcedCodexInstructionsTemplate(t *testing.T) {
configPath := filepath.Join(tempDir, "config.yaml")
require.NoError(t, os.WriteFile(templatePath, []byte("server-prefix\n\n{{ .ExistingInstructions }}"), 0o644))
require.NoError(t, os.WriteFile(configPath, []byte("gateway:\n forced_codex_instructions_template_file: \""+templatePath+"\"\n"), 0o644))
yamlSafePath := filepath.ToSlash(templatePath)
require.NoError(t, os.WriteFile(configPath, []byte("gateway:\n forced_codex_instructions_template_file: \""+yamlSafePath+"\"\n"), 0o644))
t.Setenv("DATA_DIR", tempDir)
cfg, err := Load()
require.NoError(t, err)
require.Equal(t, templatePath, cfg.Gateway.ForcedCodexInstructionsTemplateFile)
require.Equal(t, yamlSafePath, cfg.Gateway.ForcedCodexInstructionsTemplateFile)
require.Equal(t, "server-prefix\n\n{{ .ExistingInstructions }}", cfg.Gateway.ForcedCodexInstructionsTemplate)
}

View File

@@ -1412,6 +1412,12 @@ func (h *AccountHandler) BulkUpdate(c *gin.Context) {
c.JSON(409, gin.H{
"error": "mixed_channel_warning",
"message": mixedErr.Error(),
"details": gin.H{
"group_id": mixedErr.GroupID,
"group_name": mixedErr.GroupName,
"current_platform": mixedErr.CurrentPlatform,
"other_platform": mixedErr.OtherPlatform,
},
})
return
}

View File

@@ -628,6 +628,7 @@ func UsageLogFromServiceAdmin(l *service.UsageLog) *AdminUsageLog {
ModelMappingChain: l.ModelMappingChain,
BillingTier: l.BillingTier,
AccountRateMultiplier: l.AccountRateMultiplier,
AccountStatsCost: l.AccountStatsCost,
IPAddress: l.IPAddress,
Account: AccountSummaryFromService(l.Account),
}

View File

@@ -427,6 +427,8 @@ type AdminUsageLog struct {
// AccountRateMultiplier 账号计费倍率快照nil 表示按 1.0 处理)
AccountRateMultiplier *float64 `json:"account_rate_multiplier"`
// AccountStatsCost 自定义定价规则计算的账号统计费用nil 表示使用默认公式)
AccountStatsCost *float64 `json:"account_stats_cost,omitempty"`
// IPAddress 用户请求 IP仅管理员可见
IPAddress *string `json:"ip_address,omitempty"`

View File

@@ -4,6 +4,7 @@ import (
"io"
"log/slog"
"net/http"
"net/url"
"strings"
"github.com/Wei-Shaw/sub2api/internal/payment"
@@ -72,9 +73,13 @@ func (h *PaymentWebhookHandler) handleNotify(c *gin.Context, providerKey string)
rawBody = string(body)
}
provider, err := h.registry.GetProviderByKey(providerKey)
// Extract out_trade_no to look up the order's specific provider instance.
// This is needed when multiple instances of the same provider exist (e.g. multiple EasyPay accounts).
outTradeNo := extractOutTradeNo(rawBody, providerKey)
provider, err := h.paymentService.GetWebhookProvider(c.Request.Context(), providerKey, outTradeNo)
if err != nil {
slog.Warn("[Payment Webhook] provider not registered", "provider", providerKey, "error", err)
slog.Warn("[Payment Webhook] provider not found", "provider", providerKey, "outTradeNo", outTradeNo, "error", err)
writeSuccessResponse(c, providerKey)
return
}
@@ -111,19 +116,40 @@ func (h *PaymentWebhookHandler) handleNotify(c *gin.Context, providerKey string)
writeSuccessResponse(c, providerKey)
}
// extractOutTradeNo parses the webhook body to find the out_trade_no.
// This allows looking up the correct provider instance before verification
// when multiple instances of the same provider are registered.
func extractOutTradeNo(rawBody, providerKey string) string {
	if providerKey != payment.TypeEasyPay {
		// For other providers (Stripe, Alipay direct, WxPay direct), the registry
		// typically has only one instance, so no instance lookup is needed.
		return ""
	}
	// EasyPay posts form-encoded bodies; a parse failure simply yields no key.
	values, err := url.ParseQuery(rawBody)
	if err != nil {
		return ""
	}
	return values.Get("out_trade_no")
}
// wxpaySuccessResponse is the JSON response expected by WeChat Pay webhook.
// WeChat Pay treats anything other than this acknowledgement shape as a
// delivery failure and will retry the notification.
type wxpaySuccessResponse struct {
	// Code is the acknowledgement status; WeChat Pay expects "SUCCESS".
	Code string `json:"code"`
	// Message is the human-readable acknowledgement text.
	Message string `json:"message"`
}

// WeChat Pay webhook success response constants.
// wxpaySuccessMessage is the Chinese word for "success" ("成功"), which is the
// literal text WeChat Pay's webhook contract expects in the message field —
// it is wire data, not a display string, and must not be translated.
const (
	wxpaySuccessCode    = "SUCCESS"
	wxpaySuccessMessage = "成功"
)
// writeSuccessResponse sends the provider-specific success response.
// WeChat Pay requires JSON {"code":"SUCCESS","message":"成功"};
// Stripe expects an empty 200; others accept plain text "success".
func writeSuccessResponse(c *gin.Context, providerKey string) {
switch providerKey {
case payment.TypeWxpay:
c.JSON(http.StatusOK, wxpaySuccessResponse{Code: "SUCCESS", Message: "成功"})
c.JSON(http.StatusOK, wxpaySuccessResponse{Code: wxpaySuccessCode, Message: wxpaySuccessMessage})
case payment.TypeStripe:
c.String(http.StatusOK, "")
default:

View File

@@ -27,6 +27,8 @@ const (
maxEasypayResponseSize = 1 << 20 // 1MB
tradeStatusSuccess = "TRADE_SUCCESS"
signTypeMD5 = "MD5"
paymentModePopup = "popup"
deviceMobile = "mobile"
)
// EasyPay implements payment.Provider for the EasyPay aggregation platform.
@@ -61,7 +63,7 @@ func (e *EasyPay) CreatePayment(ctx context.Context, req payment.CreatePaymentRe
// Payment mode determined by instance config, not payment type.
// "popup" → hosted page (submit.php); "qrcode"/default → API call (mapi.php).
mode := e.config["paymentMode"]
if mode == "popup" {
if mode == paymentModePopup {
return e.createRedirectPayment(req)
}
return e.createAPIPayment(ctx, req)
@@ -81,6 +83,9 @@ func (e *EasyPay) createRedirectPayment(req payment.CreatePaymentRequest) (*paym
if cid := e.resolveCID(req.PaymentType); cid != "" {
params["cid"] = cid
}
if req.IsMobile {
params["device"] = deviceMobile
}
params["sign"] = easyPaySign(params, e.config["pkey"])
params["sign_type"] = signTypeMD5
@@ -106,7 +111,7 @@ func (e *EasyPay) createAPIPayment(ctx context.Context, req payment.CreatePaymen
params["cid"] = cid
}
if req.IsMobile {
params["device"] = "mobile"
params["device"] = deviceMobile
}
params["sign"] = easyPaySign(params, e.config["pkey"])
params["sign_type"] = signTypeMD5
@@ -120,6 +125,7 @@ func (e *EasyPay) createAPIPayment(ctx context.Context, req payment.CreatePaymen
Msg string `json:"msg"`
TradeNo string `json:"trade_no"`
PayURL string `json:"payurl"`
PayURL2 string `json:"payurl2"` // H5 mobile payment URL
QRCode string `json:"qrcode"`
}
if err := json.Unmarshal(body, &resp); err != nil {
@@ -128,7 +134,11 @@ func (e *EasyPay) createAPIPayment(ctx context.Context, req payment.CreatePaymen
if resp.Code != easypayCodeSuccess {
return nil, fmt.Errorf("easypay error: %s", resp.Msg)
}
return &payment.CreatePaymentResponse{TradeNo: resp.TradeNo, PayURL: resp.PayURL, QRCode: resp.QRCode}, nil
payURL := resp.PayURL
if req.IsMobile && resp.PayURL2 != "" {
payURL = resp.PayURL2
}
return &payment.CreatePaymentResponse{TradeNo: resp.TradeNo, PayURL: payURL, QRCode: resp.QRCode}, nil
}
// resolveURLs returns (notifyURL, returnURL) preferring request values,

View File

@@ -18,6 +18,9 @@ const (
BlockTypeFunction
)
// UsageMapHook is a callback that can modify usage data before it's emitted in SSE events.
type UsageMapHook func(usageMap map[string]any)
// StreamingProcessor 流式响应处理器
type StreamingProcessor struct {
blockType BlockType
@@ -30,6 +33,7 @@ type StreamingProcessor struct {
originalModel string
webSearchQueries []string
groundingChunks []GeminiGroundingChunk
usageMapHook UsageMapHook
// 累计 usage
inputTokens int
@@ -46,6 +50,28 @@ func NewStreamingProcessor(originalModel string) *StreamingProcessor {
}
}
// SetUsageMapHook sets an optional hook that modifies usage maps before they are emitted.
// Passing nil clears the hook, restoring the default behavior of emitting the
// usage struct unmodified (call sites only invoke the hook when it is non-nil).
func (p *StreamingProcessor) SetUsageMapHook(fn UsageMapHook) {
	p.usageMapHook = fn
}
// usageToMap converts a ClaudeUsage value into the map form handed to a
// UsageMapHook before the usage data is emitted in SSE events.
// input_tokens and output_tokens are always present; the cache and image
// token counts are included only when they are greater than zero.
func usageToMap(u ClaudeUsage) map[string]any {
	m := make(map[string]any, 5)
	m["input_tokens"] = u.InputTokens
	m["output_tokens"] = u.OutputTokens
	if v := u.CacheCreationInputTokens; v > 0 {
		m["cache_creation_input_tokens"] = v
	}
	if v := u.CacheReadInputTokens; v > 0 {
		m["cache_read_input_tokens"] = v
	}
	if v := u.ImageOutputTokens; v > 0 {
		m["image_output_tokens"] = v
	}
	return m
}
// ProcessLine 处理 SSE 行,返回 Claude SSE 事件
func (p *StreamingProcessor) ProcessLine(line string) []byte {
line = strings.TrimSpace(line)
@@ -172,6 +198,13 @@ func (p *StreamingProcessor) emitMessageStart(v1Resp *V1InternalResponse) []byte
responseID = "msg_" + generateRandomID()
}
var usageValue any = usage
if p.usageMapHook != nil {
usageMap := usageToMap(usage)
p.usageMapHook(usageMap)
usageValue = usageMap
}
message := map[string]any{
"id": responseID,
"type": "message",
@@ -180,7 +213,7 @@ func (p *StreamingProcessor) emitMessageStart(v1Resp *V1InternalResponse) []byte
"model": p.originalModel,
"stop_reason": nil,
"stop_sequence": nil,
"usage": usage,
"usage": usageValue,
}
event := map[string]any{
@@ -492,13 +525,20 @@ func (p *StreamingProcessor) emitFinish(finishReason string) []byte {
ImageOutputTokens: p.imageOutputTokens,
}
var usageValue any = usage
if p.usageMapHook != nil {
usageMap := usageToMap(usage)
p.usageMapHook(usageMap)
usageValue = usageMap
}
deltaEvent := map[string]any{
"type": "message_delta",
"delta": map[string]any{
"stop_reason": stopReason,
"stop_sequence": nil,
},
"usage": usage,
"usage": usageValue,
}
_, _ = result.Write(p.formatSSE("message_delta", deltaEvent))

View File

@@ -27,13 +27,14 @@ func ChatCompletionsToResponses(req *ChatCompletionsRequest) (*ResponsesRequest,
}
out := &ResponsesRequest{
Model: req.Model,
Input: inputJSON,
Temperature: req.Temperature,
TopP: req.TopP,
Stream: true, // upstream always streams
Include: []string{"reasoning.encrypted_content"},
ServiceTier: req.ServiceTier,
Model: req.Model,
Instructions: req.Instructions,
Input: inputJSON,
Temperature: req.Temperature,
TopP: req.TopP,
Stream: true, // upstream always streams
Include: []string{"reasoning.encrypted_content"},
ServiceTier: req.ServiceTier,
}
storeFalse := false

View File

@@ -152,6 +152,7 @@ type AnthropicDelta struct {
// ResponsesRequest is the request body for POST /v1/responses.
type ResponsesRequest struct {
Model string `json:"model"`
Instructions string `json:"instructions,omitempty"`
Input json.RawMessage `json:"input"` // string or []ResponsesInputItem
MaxOutputTokens *int `json:"max_output_tokens,omitempty"`
Temperature *float64 `json:"temperature,omitempty"`
@@ -337,6 +338,7 @@ type ResponsesStreamEvent struct {
type ChatCompletionsRequest struct {
Model string `json:"model"`
Messages []ChatMessage `json:"messages"`
Instructions string `json:"instructions,omitempty"` // OpenAI Responses API compat
MaxTokens *int `json:"max_tokens,omitempty"`
MaxCompletionTokens *int `json:"max_completion_tokens,omitempty"`
Temperature *float64 `json:"temperature,omitempty"`

View File

@@ -10,7 +10,13 @@ import (
)
func TestInit_DualOutput(t *testing.T) {
tmpDir := t.TempDir()
// Use os.MkdirTemp instead of t.TempDir to avoid cleanup failures
// when lumberjack holds file handles on Windows.
tmpDir, err := os.MkdirTemp("", "logger-test-*")
if err != nil {
t.Fatalf("create temp dir: %v", err)
}
t.Cleanup(func() { _ = os.RemoveAll(tmpDir) })
logPath := filepath.Join(tmpDir, "logs", "sub2api.log")
origStdout := os.Stdout
@@ -57,7 +63,9 @@ func TestInit_DualOutput(t *testing.T) {
L().Info("dual-output-info")
L().Warn("dual-output-warn")
Sync()
// Skip Sync() — on Windows, fsync on pipes deadlocks (FlushFileBuffers).
// The log data is already in the pipe buffer; closing writers is sufficient.
_ = stdoutW.Close()
_ = stderrW.Close()
@@ -166,7 +174,9 @@ func TestInit_CallerShouldPointToCallsite(t *testing.T) {
}
L().Info("caller-check")
Sync()
// Skip Sync() — on Windows, fsync on pipes deadlocks (FlushFileBuffers).
os.Stdout = origStdout
os.Stderr = origStderr
_ = stdoutW.Close()
logBytes, _ := io.ReadAll(stdoutR)

View File

@@ -77,7 +77,7 @@ func TestStdLogBridgeRoutesLevels(t *testing.T) {
log.Printf("service started")
log.Printf("Warning: queue full")
log.Printf("Forward request failed: timeout")
Sync()
// Skip Sync() — on Windows, fsync on pipes deadlocks (FlushFileBuffers).
_ = stdoutW.Close()
_ = stderrW.Close()
@@ -139,7 +139,7 @@ func TestLegacyPrintfRoutesLevels(t *testing.T) {
LegacyPrintf("service.test", "request started")
LegacyPrintf("service.test", "Warning: queue full")
LegacyPrintf("service.test", "forward failed: timeout")
Sync()
// Skip Sync() — on Windows, fsync on pipes deadlocks (FlushFileBuffers).
_ = stdoutW.Close()
_ = stderrW.Close()

View File

@@ -113,9 +113,11 @@ func (r *usageBillingRepository) applyUsageBillingEffects(ctx context.Context, t
}
if cmd.BalanceCost > 0 {
if err := deductUsageBillingBalance(ctx, tx, cmd.UserID, cmd.BalanceCost); err != nil {
newBalance, err := deductUsageBillingBalance(ctx, tx, cmd.UserID, cmd.BalanceCost)
if err != nil {
return err
}
result.NewBalance = &newBalance
}
if cmd.APIKeyQuotaCost > 0 {
@@ -133,9 +135,11 @@ func (r *usageBillingRepository) applyUsageBillingEffects(ctx context.Context, t
}
if cmd.AccountQuotaCost > 0 && (strings.EqualFold(cmd.AccountType, service.AccountTypeAPIKey) || strings.EqualFold(cmd.AccountType, service.AccountTypeBedrock)) {
if err := incrementUsageBillingAccountQuota(ctx, tx, cmd.AccountID, cmd.AccountQuotaCost); err != nil {
quotaState, err := incrementUsageBillingAccountQuota(ctx, tx, cmd.AccountID, cmd.AccountQuotaCost)
if err != nil {
return err
}
result.QuotaState = quotaState
}
return nil
@@ -169,24 +173,22 @@ func incrementUsageBillingSubscription(ctx context.Context, tx *sql.Tx, subscrip
return service.ErrSubscriptionNotFound
}
func deductUsageBillingBalance(ctx context.Context, tx *sql.Tx, userID int64, amount float64) error {
res, err := tx.ExecContext(ctx, `
func deductUsageBillingBalance(ctx context.Context, tx *sql.Tx, userID int64, amount float64) (float64, error) {
var newBalance float64
err := tx.QueryRowContext(ctx, `
UPDATE users
SET balance = balance - $1,
updated_at = NOW()
WHERE id = $2 AND deleted_at IS NULL
`, amount, userID)
RETURNING balance
`, amount, userID).Scan(&newBalance)
if errors.Is(err, sql.ErrNoRows) {
return 0, service.ErrUserNotFound
}
if err != nil {
return err
return 0, err
}
affected, err := res.RowsAffected()
if err != nil {
return err
}
if affected > 0 {
return nil
}
return service.ErrUserNotFound
return newBalance, nil
}
func incrementUsageBillingAPIKeyQuota(ctx context.Context, tx *sql.Tx, apiKeyID int64, amount float64) (bool, error) {
@@ -240,7 +242,7 @@ func incrementUsageBillingAPIKeyRateLimit(ctx context.Context, tx *sql.Tx, apiKe
return nil
}
func incrementUsageBillingAccountQuota(ctx context.Context, tx *sql.Tx, accountID int64, amount float64) error {
func incrementUsageBillingAccountQuota(ctx context.Context, tx *sql.Tx, accountID int64, amount float64) (*service.AccountQuotaState, error) {
rows, err := tx.QueryContext(ctx,
`UPDATE accounts SET extra = (
COALESCE(extra, '{}'::jsonb)
@@ -279,32 +281,40 @@ func incrementUsageBillingAccountQuota(ctx context.Context, tx *sql.Tx, accountI
WHERE id = $2 AND deleted_at IS NULL
RETURNING
COALESCE((extra->>'quota_used')::numeric, 0),
COALESCE((extra->>'quota_limit')::numeric, 0)`,
COALESCE((extra->>'quota_limit')::numeric, 0),
COALESCE((extra->>'quota_daily_used')::numeric, 0),
COALESCE((extra->>'quota_daily_limit')::numeric, 0),
COALESCE((extra->>'quota_weekly_used')::numeric, 0),
COALESCE((extra->>'quota_weekly_limit')::numeric, 0)`,
amount, accountID)
if err != nil {
return err
return nil, err
}
defer func() { _ = rows.Close() }()
var newUsed, limit float64
var state service.AccountQuotaState
if rows.Next() {
if err := rows.Scan(&newUsed, &limit); err != nil {
return err
if err := rows.Scan(
&state.TotalUsed, &state.TotalLimit,
&state.DailyUsed, &state.DailyLimit,
&state.WeeklyUsed, &state.WeeklyLimit,
); err != nil {
return nil, err
}
} else {
if err := rows.Err(); err != nil {
return err
return nil, err
}
return service.ErrAccountNotFound
return nil, service.ErrAccountNotFound
}
if err := rows.Err(); err != nil {
return err
return nil, err
}
if limit > 0 && newUsed >= limit && (newUsed-amount) < limit {
if state.TotalLimit > 0 && state.TotalUsed >= state.TotalLimit && (state.TotalUsed-amount) < state.TotalLimit {
if err := enqueueSchedulerOutbox(ctx, tx, service.SchedulerOutboxEventAccountChanged, &accountID, nil, nil); err != nil {
logger.LegacyPrintf("repository.usage_billing", "[SchedulerOutbox] enqueue quota exceeded failed: account=%d err=%v", accountID, err)
return err
return nil, err
}
}
return nil
return &state, nil
}

View File

@@ -100,7 +100,7 @@ func (r *userGroupRateRepository) GetByGroupID(ctx context.Context, groupID int6
query := `
SELECT ugr.user_id, u.username, u.email, COALESCE(u.notes, ''), u.status, ugr.rate_multiplier
FROM user_group_rate_multipliers ugr
JOIN users u ON u.id = ugr.user_id
JOIN users u ON u.id = ugr.user_id AND u.deleted_at IS NULL
WHERE ugr.group_id = $1
ORDER BY ugr.user_id
`

View File

@@ -26,6 +26,7 @@ func RegisterPaymentRoutes(
authenticated.Use(middleware.BackendModeUserGuard(settingService))
{
authenticated.GET("/config", paymentHandler.GetPaymentConfig)
authenticated.GET("/checkout-info", paymentHandler.GetCheckoutInfo)
authenticated.GET("/plans", paymentHandler.GetPlans)
authenticated.GET("/channels", paymentHandler.GetChannels)
authenticated.GET("/limits", paymentHandler.GetLimits)
@@ -33,6 +34,7 @@ func RegisterPaymentRoutes(
orders := authenticated.Group("/orders")
{
orders.POST("", paymentHandler.CreateOrder)
orders.POST("/verify", paymentHandler.VerifyOrder)
orders.GET("/my", paymentHandler.GetMyOrders)
orders.GET("/:id", paymentHandler.GetOrder)
orders.POST("/:id/cancel", paymentHandler.CancelOrder)
@@ -52,6 +54,8 @@ func RegisterPaymentRoutes(
// --- Webhook endpoints (no auth) ---
webhook := v1.Group("/payment/webhook")
{
// EasyPay sends GET callbacks with query params
webhook.GET("/easypay", webhookHandler.EasyPayNotify)
webhook.POST("/easypay", webhookHandler.EasyPayNotify)
webhook.POST("/alipay", webhookHandler.AlipayNotify)
webhook.POST("/wxpay", webhookHandler.WxpayNotify)

View File

@@ -6,6 +6,7 @@ import (
"encoding/hex"
"errors"
"fmt"
"log/slog"
"math/rand/v2"
"time"
@@ -99,7 +100,7 @@ func (s *APIKeyService) StartAuthCacheInvalidationSubscriber(ctx context.Context
s.authCacheL1.Del(cacheKey)
}); err != nil {
// Log but don't fail - L1 cache will still work, just without cross-instance invalidation
println("[Service] Warning: failed to start auth cache invalidation subscriber:", err.Error())
slog.Warn("failed to start auth cache invalidation subscriber", "error", err)
}
}

View File

@@ -81,9 +81,9 @@ type wildcardMappingEntry struct {
type channelCache struct {
// 热路径查找
pricingByGroupModel map[channelModelKey]*ChannelModelPricing // (groupID, platform, model) → 定价
wildcardByGroupPlatform map[channelGroupPlatformKey][]*wildcardPricingEntry // (groupID, platform) → 通配符定价(前缀长度降序
wildcardByGroupPlatform map[channelGroupPlatformKey][]*wildcardPricingEntry // (groupID, platform) → 通配符定价(按配置顺序,先匹配先使用
mappingByGroupModel map[channelModelKey]string // (groupID, platform, model) → 映射目标
wildcardMappingByGP map[channelGroupPlatformKey][]*wildcardMappingEntry // (groupID, platform) → 通配符映射(前缀长度降序
wildcardMappingByGP map[channelGroupPlatformKey][]*wildcardMappingEntry // (groupID, platform) → 通配符映射(按配置顺序,先匹配先使用
channelByGroupID map[int64]*Channel // groupID → 渠道
groupPlatform map[int64]string // groupID → platform
@@ -680,6 +680,7 @@ func (s *ChannelService) Create(ctx context.Context, input *CreateChannelInput)
ModelPricing: input.ModelPricing,
ModelMapping: input.ModelMapping,
Features: input.Features,
FeaturesConfig: input.FeaturesConfig,
ApplyPricingToAccountStats: input.ApplyPricingToAccountStats,
AccountStatsPricingRules: input.AccountStatsPricingRules,
}
@@ -780,6 +781,9 @@ func (s *ChannelService) applyUpdateInput(ctx context.Context, channel *Channel,
if input.BillingModelSource != "" {
channel.BillingModelSource = input.BillingModelSource
}
if input.FeaturesConfig != nil {
channel.FeaturesConfig = input.FeaturesConfig
}
if input.ApplyPricingToAccountStats != nil {
channel.ApplyPricingToAccountStats = *input.ApplyPricingToAccountStats
}
@@ -959,6 +963,7 @@ type CreateChannelInput struct {
BillingModelSource string
RestrictModels bool
Features string
FeaturesConfig map[string]any
ApplyPricingToAccountStats bool
AccountStatsPricingRules []AccountStatsPricingRule
}
@@ -974,6 +979,7 @@ type UpdateChannelInput struct {
BillingModelSource string
RestrictModels *bool
Features *string
FeaturesConfig map[string]any
ApplyPricingToAccountStats *bool
AccountStatsPricingRules *[]AccountStatsPricingRule
}

View File

@@ -343,8 +343,9 @@ func (s *ConcurrencyService) StartSlotCleanupWorker(accountRepo AccountRepositor
}()
}
// GetAccountConcurrencyBatch gets current concurrency counts for multiple accounts
// Returns a map of accountID -> current concurrency count
// GetAccountConcurrencyBatch gets current concurrency counts for multiple accounts.
// Uses a detached context with timeout to prevent HTTP request cancellation from
// causing the entire batch to fail (which would show all concurrency as 0).
func (s *ConcurrencyService) GetAccountConcurrencyBatch(ctx context.Context, accountIDs []int64) (map[int64]int, error) {
if len(accountIDs) == 0 {
return map[int64]int{}, nil
@@ -356,5 +357,11 @@ func (s *ConcurrencyService) GetAccountConcurrencyBatch(ctx context.Context, acc
}
return result, nil
}
return s.cache.GetAccountConcurrencyBatch(ctx, accountIDs)
// Use a detached context so that a cancelled HTTP request doesn't cause
// the Redis pipeline to fail and return all-zero concurrency counts.
redisCtx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
defer cancel()
return s.cache.GetAccountConcurrencyBatch(redisCtx, accountIDs)
}

View File

@@ -214,17 +214,23 @@ func writeWebSearchStreamResponse(
) (*ForwardResult, error) {
msgID := webSearchMsgIDPrefix + uuid.New().String()
toolUseID := webSearchToolUseIDPrefix + uuid.New().String()[:16]
textSummary := buildTextSummary(query, resp.Results)
setSSEHeaders(c)
if err := writeSSEMessageStart(c.Writer, msgID, model); err != nil {
return nil, fmt.Errorf("web search emulation: SSE write: %w", err)
w := c.Writer
for _, fn := range []func() error{
func() error { return writeSSEMessageStart(w, msgID, model) },
func() error { return writeSSEServerToolUse(w, toolUseID, query, 0) },
func() error { return writeSSEToolResult(w, toolUseID, resp.Results, 1) },
func() error { return writeSSETextBlock(w, textSummary, 2) },
func() error { return writeSSEMessageEnd(w, len(textSummary)/tokenEstimateDivisor) },
} {
if err := fn(); err != nil {
slog.Warn("web search emulation: SSE write failed, stopping", "error", err)
break
}
}
writeSSEServerToolUse(c.Writer, toolUseID, query, 0)
writeSSEToolResult(c.Writer, toolUseID, resp.Results, 1)
textSummary := buildTextSummary(query, resp.Results)
writeSSETextBlock(c.Writer, textSummary, 2)
writeSSEMessageEnd(c.Writer, len(textSummary)/tokenEstimateDivisor)
c.Writer.Flush()
w.Flush()
return &ForwardResult{Model: model, Duration: time.Since(startTime), Usage: ClaudeUsage{}}, nil
}
@@ -249,7 +255,7 @@ func writeSSEMessageStart(w http.ResponseWriter, msgID, model string) error {
return flushSSEJSON(w, "message_start", evt)
}
func writeSSEServerToolUse(w http.ResponseWriter, toolUseID, query string, index int) {
func writeSSEServerToolUse(w http.ResponseWriter, toolUseID, query string, index int) error {
start := map[string]any{
"type": "content_block_start", "index": index,
"content_block": map[string]any{
@@ -257,11 +263,13 @@ func writeSSEServerToolUse(w http.ResponseWriter, toolUseID, query string, index
"name": toolNameWebSearch, "input": map[string]string{"query": query},
},
}
_ = flushSSEJSON(w, "content_block_start", start)
_ = flushSSEJSON(w, "content_block_stop", map[string]any{"type": "content_block_stop", "index": index})
if err := flushSSEJSON(w, "content_block_start", start); err != nil {
return err
}
return flushSSEJSON(w, "content_block_stop", map[string]any{"type": "content_block_stop", "index": index})
}
func writeSSEToolResult(w http.ResponseWriter, toolUseID string, results []websearch.SearchResult, index int) {
func writeSSEToolResult(w http.ResponseWriter, toolUseID string, results []websearch.SearchResult, index int) error {
start := map[string]any{
"type": "content_block_start", "index": index,
"content_block": map[string]any{
@@ -269,40 +277,48 @@ func writeSSEToolResult(w http.ResponseWriter, toolUseID string, results []webse
"content": buildSearchResultBlocks(results),
},
}
_ = flushSSEJSON(w, "content_block_start", start)
_ = flushSSEJSON(w, "content_block_stop", map[string]any{"type": "content_block_stop", "index": index})
if err := flushSSEJSON(w, "content_block_start", start); err != nil {
return err
}
return flushSSEJSON(w, "content_block_stop", map[string]any{"type": "content_block_stop", "index": index})
}
func writeSSETextBlock(w http.ResponseWriter, text string, index int) {
_ = flushSSEJSON(w, "content_block_start", map[string]any{
func writeSSETextBlock(w http.ResponseWriter, text string, index int) error {
if err := flushSSEJSON(w, "content_block_start", map[string]any{
"type": "content_block_start", "index": index,
"content_block": map[string]any{"type": "text", "text": ""},
})
_ = flushSSEJSON(w, "content_block_delta", map[string]any{
}); err != nil {
return err
}
if err := flushSSEJSON(w, "content_block_delta", map[string]any{
"type": "content_block_delta", "index": index,
"delta": map[string]string{"type": "text_delta", "text": text},
})
_ = flushSSEJSON(w, "content_block_stop", map[string]any{"type": "content_block_stop", "index": index})
}); err != nil {
return err
}
return flushSSEJSON(w, "content_block_stop", map[string]any{"type": "content_block_stop", "index": index})
}
func writeSSEMessageEnd(w http.ResponseWriter, outputTokens int) {
_ = flushSSEJSON(w, "message_delta", map[string]any{
func writeSSEMessageEnd(w http.ResponseWriter, outputTokens int) error {
if err := flushSSEJSON(w, "message_delta", map[string]any{
"type": "message_delta",
"delta": map[string]any{"stop_reason": "end_turn", "stop_sequence": nil},
"usage": map[string]int{"output_tokens": outputTokens},
})
_ = flushSSEJSON(w, "message_stop", map[string]string{"type": "message_stop"})
}); err != nil {
return err
}
return flushSSEJSON(w, "message_stop", map[string]string{"type": "message_stop"})
}
// flushSSEJSON marshals data to JSON and writes an SSE event. Returns error on marshal failure.
// flushSSEJSON marshals data to JSON and writes an SSE event.
func flushSSEJSON(w http.ResponseWriter, event string, data any) error {
b, err := json.Marshal(data)
if err != nil {
slog.Error("web search emulation: failed to marshal SSE event",
"event", event, "error", err)
return err
return fmt.Errorf("marshal: %w", err)
}
if _, err := fmt.Fprintf(w, "event: %s\ndata: %s\n\n", event, b); err != nil {
return fmt.Errorf("write: %w", err)
}
fmt.Fprintf(w, "event: %s\ndata: %s\n\n", event, b)
if f, ok := w.(http.Flusher); ok {
f.Flush()
}

View File

@@ -64,12 +64,9 @@ func (s *OpsService) getAccountsLoadMapBestEffort(ctx context.Context, accounts
if acc.ID <= 0 {
continue
}
c := acc.Concurrency
if c <= 0 {
c = 1
}
if prev, ok := unique[acc.ID]; !ok || c > prev {
unique[acc.ID] = c
lf := acc.EffectiveLoadFactor()
if prev, ok := unique[acc.ID]; !ok || lf > prev {
unique[acc.ID] = lf
}
}

View File

@@ -391,7 +391,7 @@ func (c *OpsMetricsCollector) collectConcurrencyQueueDepth(parentCtx context.Con
}
batch = append(batch, AccountWithConcurrency{
ID: acc.ID,
MaxConcurrency: acc.Concurrency,
MaxConcurrency: acc.EffectiveLoadFactor(),
})
}
if len(batch) == 0 {

View File

@@ -183,6 +183,15 @@ func TestOpsSystemLogSink_StartStopAndFlushSuccess(t *testing.T) {
if strings.TrimSpace(item.Message) == "" {
t.Fatalf("message should not be empty")
}
// writtenCount is incremented after BatchInsertSystemLogsFn returns,
// so poll briefly to avoid a race between the done signal and the atomic add.
deadline := time.Now().Add(time.Second)
for time.Now().Before(deadline) {
if sink.Health().WrittenCount > 0 {
break
}
time.Sleep(time.Millisecond)
}
health := sink.Health()
if health.WrittenCount == 0 {
t.Fatalf("written_count should be >0")

View File

@@ -113,11 +113,11 @@ func (s *PaymentConfigService) GetGroupInfoMap(ctx context.Context, plans []*dbe
}
func (s *PaymentConfigService) ListPlans(ctx context.Context) ([]*dbent.SubscriptionPlan, error) {
return s.entClient.SubscriptionPlan.Query().Order(subscriptionplan.ByCreatedAt()).All(ctx)
return s.entClient.SubscriptionPlan.Query().Order(subscriptionplan.BySortOrder()).All(ctx)
}
// ListPlansForSale returns only plans currently offered for sale, ordered by
// the admin-configured sort_order field (display order), not creation time.
func (s *PaymentConfigService) ListPlansForSale(ctx context.Context) ([]*dbent.SubscriptionPlan, error) {
	// A stale duplicate return ordering by ByCreatedAt previously shadowed this
	// line, making the intended sort_order ordering unreachable dead code.
	return s.entClient.SubscriptionPlan.Query().Where(subscriptionplan.ForSaleEQ(true)).Order(subscriptionplan.BySortOrder()).All(ctx)
}
func (s *PaymentConfigService) CreatePlan(ctx context.Context, req CreatePlanRequest) (*dbent.SubscriptionPlan, error) {

View File

@@ -140,6 +140,16 @@ func (s *SettingService) SaveWebSearchEmulationConfig(ctx context.Context, cfg *
}
s.mergeExistingAPIKeys(ctx, cfg)
// After merge, validate all enabled providers have API keys
if cfg.Enabled {
for _, p := range cfg.Providers {
if p.APIKey == "" {
return infraerrors.BadRequest("MISSING_API_KEY",
fmt.Sprintf("provider %s has no API key configured", p.Type))
}
}
}
data, err := json.Marshal(cfg)
if err != nil {
return fmt.Errorf("websearch: marshal config: %w", err)

View File

@@ -49,6 +49,7 @@ export interface Channel {
status: string
billing_model_source: string // "requested" | "upstream"
restrict_models: boolean
features_config?: Record<string, unknown>
group_ids: number[]
model_pricing: ChannelModelPricing[]
model_mapping: Record<string, Record<string, string>> // platform → {src→dst}
@@ -66,6 +67,7 @@ export interface CreateChannelRequest {
model_mapping?: Record<string, Record<string, string>>
billing_model_source?: string
restrict_models?: boolean
features_config?: Record<string, unknown>
apply_pricing_to_account_stats?: boolean
account_stats_pricing_rules?: AccountStatsPricingRule[]
}
@@ -79,6 +81,7 @@ export interface UpdateChannelRequest {
model_mapping?: Record<string, Record<string, string>>
billing_model_source?: string
restrict_models?: boolean
features_config?: Record<string, unknown>
apply_pricing_to_account_stats?: boolean
account_stats_pricing_rules?: AccountStatsPricingRule[]
}

View File

@@ -429,8 +429,6 @@ export interface AdminGroup extends Group {
// MCP XML 协议注入(仅 antigravity 平台使用)
mcp_xml_inject: boolean
// Claude usage 模拟开关(仅 anthropic 平台使用)
simulate_claude_max_enabled: boolean
// 支持的模型系列(仅 antigravity 平台使用)
supported_model_scopes?: string[]
@@ -523,7 +521,6 @@ export interface CreateGroupRequest {
fallback_group_id?: number | null
fallback_group_id_on_invalid_request?: number | null
mcp_xml_inject?: boolean
simulate_claude_max_enabled?: boolean
supported_model_scopes?: string[]
require_oauth_only?: boolean
require_privacy_set?: boolean
@@ -549,7 +546,6 @@ export interface UpdateGroupRequest {
fallback_group_id?: number | null
fallback_group_id_on_invalid_request?: number | null
mcp_xml_inject?: boolean
simulate_claude_max_enabled?: boolean
supported_model_scopes?: string[]
require_oauth_only?: boolean
require_privacy_set?: boolean
@@ -691,6 +687,7 @@ export interface Account {
// Extra fields including Codex usage and model-level rate limits (Antigravity smart retry)
extra?: (CodexUsageSnapshot & {
model_rate_limits?: Record<string, { rate_limited_at: string; rate_limit_reset_at: string }>
antigravity_credits_overages?: Record<string, { activated_at: string; active_until: string }>
} & Record<string, unknown>)
proxy_id: number | null
concurrency: number
@@ -752,12 +749,6 @@ export interface Account {
custom_base_url_enabled?: boolean | null
custom_base_url?: string | null
// 客户端亲和调度(仅 Anthropic/Antigravity 平台有效)
// 启用后新会话会优先调度到客户端之前使用过的账号
client_affinity_enabled?: boolean | null
affinity_client_count?: number | null
affinity_clients?: string[] | null
// API Key 账号配额限制
quota_limit?: number | null
quota_used?: number | null
@@ -1066,6 +1057,8 @@ export interface AdminUsageLog extends UsageLog {
// 账号计费倍率(仅管理员可见)
account_rate_multiplier?: number | null
// 自定义定价规则计算的账号统计费用nil 时使用 total_cost * multiplier
account_stats_cost?: number | null
// 渠道 ID 和计费等级(仅管理员可见)
channel_id?: number | null

View File

@@ -3253,6 +3253,7 @@ const editForm = reactive({
fallback_group_id_on_invalid_request: null as number | null,
// OpenAI Messages 调度配置(仅 openai 平台使用)
allow_messages_dispatch: false,
default_mapped_model: '',
opus_mapped_model: editMessagesDispatchDefaults.opus_mapped_model,
sonnet_mapped_model: editMessagesDispatchDefaults.sonnet_mapped_model,
haiku_mapped_model: editMessagesDispatchDefaults.haiku_mapped_model,
@@ -3732,6 +3733,19 @@ watch(
},
);
// Reset platform-specific edit fields whenever the group's platform changes:
// the invalid-request fallback only applies to anthropic/antigravity, and the
// Messages-dispatch mapping only applies to openai.
watch(
  () => editForm.platform,
  (platform) => {
    const supportsInvalidRequestFallback = ['anthropic', 'antigravity'].includes(platform)
    if (!supportsInvalidRequestFallback) {
      editForm.fallback_group_id_on_invalid_request = null
    }
    if (platform !== 'openai') {
      editForm.allow_messages_dispatch = false
      editForm.default_mapped_model = ''
    }
  }
)
// 点击外部关闭账号搜索下拉框
const handleClickOutside = (event: MouseEvent) => {
const target = event.target as HTMLElement;

View File

@@ -495,7 +495,7 @@ const exportToExcel = async () => {
log.cache_read_cost?.toFixed(6) || '0.000000', log.cache_creation_cost?.toFixed(6) || '0.000000',
log.rate_multiplier?.toPrecision(4) || '1.00', (log.account_rate_multiplier ?? 1).toPrecision(4),
log.total_cost?.toFixed(6) || '0.000000', log.actual_cost?.toFixed(6) || '0.000000',
(log.total_cost * (log.account_rate_multiplier ?? 1)).toFixed(6), log.first_token_ms ?? '', log.duration_ms,
((log.account_stats_cost ?? log.total_cost) * (log.account_rate_multiplier ?? 1)).toFixed(6), log.first_token_ms ?? '', log.duration_ms,
log.request_id || '', log.user_agent || '', log.ip_address || ''
])
if (rows.length) {

View File

@@ -117,6 +117,7 @@ function getPlanNameClass(groupId: number): string {
return group ? platformTextClass(group.platform) : 'text-gray-900 dark:text-white'
}
// ==================== Plans ====================
const plansLoading = ref(false)
@@ -133,6 +134,7 @@ const planColumns = computed((): Column[] => [
{ key: 'price', label: t('payment.admin.price') },
{ key: 'validity_days', label: t('payment.admin.validityDays') },
{ key: 'for_sale', label: t('payment.admin.forSale') },
{ key: 'sort_order', label: t('payment.admin.sortOrder') },
{ key: 'actions', label: t('common.actions') },
])
@@ -157,6 +159,7 @@ function openPlanEdit(plan: SubscriptionPlan | null) {
showPlanDialog.value = true
}
/** Quick toggle for_sale from the list */
async function toggleForSale(plan: SubscriptionPlan) {
try {

View File

@@ -42,6 +42,9 @@
<div><label class="input-label">{{ t('payment.admin.validityDays') }} <span class="text-red-500">*</span></label><input v-model.number="planForm.validity_days" type="number" min="1" class="input" required /></div>
<div><label class="input-label">{{ t('payment.admin.validityUnit') }} <span class="text-red-500">*</span></label><Select v-model="planForm.validity_unit" :options="validityUnitOptions" /></div>
</div>
<div class="grid grid-cols-2 gap-4">
<div><label class="input-label">{{ t('payment.admin.sortOrder') }}</label><input v-model.number="planForm.sort_order" type="number" min="0" class="input" /></div>
</div>
<div>
<label class="input-label">{{ t('payment.admin.features') }}</label>
<textarea v-model="planFeaturesText" rows="3" class="input" :placeholder="t('payment.admin.featuresPlaceholder')"></textarea>
@@ -102,7 +105,7 @@ const { t } = useI18n()
const appStore = useAppStore()
const saving = ref(false)
const planForm = reactive({ name: '', group_id: null as number | null, description: '', price: 0, original_price: 0, validity_days: 30, validity_unit: 'days', for_sale: true })
const planForm = reactive({ name: '', group_id: null as number | null, description: '', price: 0, original_price: 0, validity_days: 30, validity_unit: 'days', sort_order: 0, for_sale: true })
const planFeaturesText = ref('')
const validityUnitOptions = computed(() => [
@@ -130,10 +133,10 @@ const selectedGroupInfo = computed(() => {
watch(() => props.show, (visible) => {
if (!visible) return
if (props.plan) {
Object.assign(planForm, { name: props.plan.name, group_id: props.plan.group_id, description: props.plan.description, price: props.plan.price, original_price: props.plan.original_price || 0, validity_days: props.plan.validity_days, validity_unit: props.plan.validity_unit || 'days', for_sale: props.plan.for_sale })
Object.assign(planForm, { name: props.plan.name, group_id: props.plan.group_id, description: props.plan.description, price: props.plan.price, original_price: props.plan.original_price || 0, validity_days: props.plan.validity_days, validity_unit: props.plan.validity_unit || 'days', sort_order: props.plan.sort_order || 0, for_sale: props.plan.for_sale })
planFeaturesText.value = (props.plan.features || []).join('\n')
} else {
Object.assign(planForm, { name: '', group_id: null, description: '', price: 0, original_price: 0, validity_days: 30, validity_unit: 'days', for_sale: true })
Object.assign(planForm, { name: '', group_id: null, description: '', price: 0, original_price: 0, validity_days: 30, validity_unit: 'days', sort_order: 0, for_sale: true })
planFeaturesText.value = ''
}
})
@@ -149,6 +152,7 @@ function buildPlanPayload() {
original_price: planForm.original_price || 0,
validity_days: planForm.validity_days,
validity_unit: planForm.validity_unit,
sort_order: planForm.sort_order,
for_sale: planForm.for_sale,
features,
}

View File

@@ -102,10 +102,12 @@ interface ReturnInfo {
}
// Return info shown on the payment result page; starts as null.
const returnInfo = ref<ReturnInfo | null>(null)
// Order statuses treated as a successful payment result.
// NOTE(review): RECHARGING presumably means the balance credit is still being applied — confirm with backend.
const SUCCESS_STATUSES = new Set(['COMPLETED', 'PAID', 'RECHARGING'])
const isSuccess = computed(() => {
// Always prioritize actual order status from backend
if (order.value) {
return order.value.status === 'COMPLETED' || order.value.status === 'PAID'
return SUCCESS_STATUSES.has(order.value.status)
}
// Fallback only when order not loaded
if (route.query.status === 'success') return true

View File

@@ -22,7 +22,7 @@
<Icon name="x" size="sm" />
<span>{{ t('payment.orders.cancel') }}</span>
</button>
<button v-if="row.status === 'COMPLETED'" @click="openRefundDialog(row)" class="inline-flex items-center gap-1 rounded-md px-2 py-1 text-xs font-medium text-purple-600 hover:bg-purple-50 dark:text-purple-400 dark:hover:bg-purple-900/20">
<button v-if="canRequestRefund(row)" @click="openRefundDialog(row)" class="inline-flex items-center gap-1 rounded-md px-2 py-1 text-xs font-medium text-purple-600 hover:bg-purple-50 dark:text-purple-400 dark:hover:bg-purple-900/20">
<Icon name="dollar" size="sm" />
<span>{{ t('payment.orders.requestRefund') }}</span>
</button>
@@ -102,6 +102,7 @@ const appStore = useAppStore()
// Reactive state for the orders view.
const loading = ref(false) // list-level loading indicator
const actionLoading = ref(false) // per-action (cancel/refund) in-flight indicator
const orders = ref<PaymentOrder[]>([])
// Provider instance IDs eligible for refunds; gates the "request refund" button (see canRequestRefund).
const refundEligibleProviders = ref<Set<string>>(new Set())
const currentFilter = ref('') // active status filter
const cancelTargetId = ref<number | null>(null) // order awaiting cancel confirmation
const refundTarget = ref<PaymentOrder | null>(null) // order awaiting refund confirmation
@@ -171,5 +172,18 @@ async function confirmRefund() {
}
}
onMounted(() => fetchOrders())
/**
 * Whether the "request refund" action should be offered for an order:
 * the order must be COMPLETED and its payment provider instance must be
 * in the refund-eligible set reported by the backend.
 */
function canRequestRefund(order: PaymentOrder): boolean {
  if (order.status !== 'COMPLETED') return false
  const providerId = order.provider_instance_id
  return providerId ? refundEligibleProviders.value.has(providerId) : false
}
/**
 * Populate the set of provider instances that allow refund requests.
 * Failures are deliberately swallowed: without eligibility data the
 * refund button simply stays hidden (empty set).
 */
async function loadRefundEligibility() {
  try {
    const { data } = await paymentAPI.getRefundEligibleProviders()
    refundEligibleProviders.value = new Set(data.provider_instance_ids || [])
  } catch {
    // best-effort — keep the default empty set
  }
}
// Kick off the order list and refund-eligibility loads when the view mounts.
onMounted(() => { fetchOrders(); loadRefundEligibility() })
</script>