fix(openai): restore ws usage window display

This commit is contained in:
神乐
2026-03-06 20:46:10 +08:00
parent 005d0c5f53
commit 838ada8864
9 changed files with 490 additions and 11 deletions

View File

@@ -305,6 +305,9 @@ func (h *OpenAIGatewayHandler) Responses(c *gin.Context) {
return return
} }
if result != nil { if result != nil {
if account.Type == service.AccountTypeOAuth {
h.gatewayService.UpdateCodexUsageSnapshotFromHeaders(c.Request.Context(), account.ID, result.ResponseHeaders)
}
h.gatewayService.ReportOpenAIAccountScheduleResult(account.ID, true, result.FirstTokenMs) h.gatewayService.ReportOpenAIAccountScheduleResult(account.ID, true, result.FirstTokenMs)
} else { } else {
h.gatewayService.ReportOpenAIAccountScheduleResult(account.ID, true, nil) h.gatewayService.ReportOpenAIAccountScheduleResult(account.ID, true, nil)
@@ -840,6 +843,9 @@ func (h *OpenAIGatewayHandler) ResponsesWebSocket(c *gin.Context) {
if turnErr != nil || result == nil { if turnErr != nil || result == nil {
return return
} }
if account.Type == service.AccountTypeOAuth {
h.gatewayService.UpdateCodexUsageSnapshotFromHeaders(ctx, account.ID, result.ResponseHeaders)
}
h.gatewayService.ReportOpenAIAccountScheduleResult(account.ID, true, result.FirstTokenMs) h.gatewayService.ReportOpenAIAccountScheduleResult(account.ID, true, result.FirstTokenMs)
h.submitUsageRecordTask(func(taskCtx context.Context) { h.submitUsageRecordTask(func(taskCtx context.Context) {
if err := h.gatewayService.RecordUsage(taskCtx, &service.OpenAIRecordUsageInput{ if err := h.gatewayService.RecordUsage(taskCtx, &service.OpenAIRecordUsageInput{

View File

@@ -1,13 +1,18 @@
package service package service
import ( import (
"bytes"
"context" "context"
"encoding/json"
"fmt" "fmt"
"log" "log"
"net/http"
"strings" "strings"
"sync" "sync"
"time" "time"
httppool "github.com/Wei-Shaw/sub2api/internal/pkg/httpclient"
openaipkg "github.com/Wei-Shaw/sub2api/internal/pkg/openai"
"github.com/Wei-Shaw/sub2api/internal/pkg/pagination" "github.com/Wei-Shaw/sub2api/internal/pkg/pagination"
"github.com/Wei-Shaw/sub2api/internal/pkg/timezone" "github.com/Wei-Shaw/sub2api/internal/pkg/timezone"
"github.com/Wei-Shaw/sub2api/internal/pkg/usagestats" "github.com/Wei-Shaw/sub2api/internal/pkg/usagestats"
@@ -90,6 +95,7 @@ type antigravityUsageCache struct {
const ( const (
apiCacheTTL = 3 * time.Minute apiCacheTTL = 3 * time.Minute
windowStatsCacheTTL = 1 * time.Minute windowStatsCacheTTL = 1 * time.Minute
openAIProbeCacheTTL = 10 * time.Minute
) )
// UsageCache 封装账户使用量相关的缓存 // UsageCache 封装账户使用量相关的缓存
@@ -97,6 +103,7 @@ type UsageCache struct {
apiCache sync.Map // accountID -> *apiUsageCache apiCache sync.Map // accountID -> *apiUsageCache
windowStatsCache sync.Map // accountID -> *windowStatsCache windowStatsCache sync.Map // accountID -> *windowStatsCache
antigravityCache sync.Map // accountID -> *antigravityUsageCache antigravityCache sync.Map // accountID -> *antigravityUsageCache
openAIProbeCache sync.Map // accountID -> time.Time
} }
// NewUsageCache 创建 UsageCache 实例 // NewUsageCache 创建 UsageCache 实例
@@ -224,6 +231,14 @@ func (s *AccountUsageService) GetUsage(ctx context.Context, accountID int64) (*U
return nil, fmt.Errorf("get account failed: %w", err) return nil, fmt.Errorf("get account failed: %w", err)
} }
if account.Platform == PlatformOpenAI && account.Type == AccountTypeOAuth {
usage, err := s.getOpenAIUsage(ctx, account)
if err == nil {
s.tryClearRecoverableAccountError(ctx, account)
}
return usage, err
}
if account.Platform == PlatformGemini { if account.Platform == PlatformGemini {
usage, err := s.getGeminiUsage(ctx, account) usage, err := s.getGeminiUsage(ctx, account)
if err == nil { if err == nil {
@@ -288,6 +303,161 @@ func (s *AccountUsageService) GetUsage(ctx context.Context, accountID int64) (*U
return nil, fmt.Errorf("account type %s does not support usage query", account.Type) return nil, fmt.Errorf("account type %s does not support usage query", account.Type)
} }
// getOpenAIUsage assembles the 5h/7d usage window display for an OpenAI
// OAuth account. It prefers the Codex rate-limit snapshot persisted in
// account.Extra; when either window is missing it may fire a throttled
// live probe to refresh the snapshot, and finally decorates both windows
// with locally recorded request/token/cost statistics.
//
// Fixes: removed the dead `usage.UpdatedAt == nil` branch (UpdatedAt is
// set unconditionally above) and deduplicated the window-stats attachment.
func (s *AccountUsageService) getOpenAIUsage(ctx context.Context, account *Account) (*UsageInfo, error) {
	now := time.Now()
	usage := &UsageInfo{UpdatedAt: &now}
	if account == nil {
		return usage, nil
	}
	if progress := buildCodexUsageProgressFromExtra(account.Extra, "5h", now); progress != nil {
		usage.FiveHour = progress
	}
	if progress := buildCodexUsageProgressFromExtra(account.Extra, "7d", now); progress != nil {
		usage.SevenDay = progress
	}
	// If either window is missing from the cached snapshot, try a
	// (rate-limited) live probe and re-read the merged snapshot.
	if (usage.FiveHour == nil || usage.SevenDay == nil) && s.shouldProbeOpenAICodexSnapshot(account.ID, now) {
		if updates, err := s.probeOpenAICodexSnapshot(ctx, account); err == nil && len(updates) > 0 {
			mergeAccountExtra(account, updates)
			if progress := buildCodexUsageProgressFromExtra(account.Extra, "5h", now); progress != nil {
				usage.FiveHour = progress
			}
			if progress := buildCodexUsageProgressFromExtra(account.Extra, "7d", now); progress != nil {
				usage.SevenDay = progress
			}
		}
	}
	if s.usageLogRepo == nil {
		return usage, nil
	}
	// attach decorates a window with locally recorded stats; repo errors
	// are intentionally ignored so stats never block the snapshot display.
	attach := func(target **UsageProgress, since time.Time) {
		stats, err := s.usageLogRepo.GetAccountWindowStats(ctx, account.ID, since)
		if err != nil {
			return
		}
		windowStats := windowStatsFromAccountStats(stats)
		if !hasMeaningfulWindowStats(windowStats) {
			return
		}
		if *target == nil {
			*target = &UsageProgress{Utilization: 0}
		}
		(*target).WindowStats = windowStats
	}
	attach(&usage.FiveHour, now.Add(-5*time.Hour))
	attach(&usage.SevenDay, now.Add(-7*24*time.Hour))
	return usage, nil
}
// shouldProbeOpenAICodexSnapshot reports whether a live Codex snapshot
// probe is allowed for the account right now, throttled to at most one
// probe per openAIProbeCacheTTL. Granting a probe also records its
// timestamp. NOTE(review): the load-then-store pair is not atomic, so two
// concurrent callers may both be granted a probe — looks like an accepted
// benign race; confirm.
func (s *AccountUsageService) shouldProbeOpenAICodexSnapshot(accountID int64, now time.Time) bool {
	if s == nil || s.cache == nil || accountID <= 0 {
		return true
	}
	cached, ok := s.cache.openAIProbeCache.Load(accountID)
	if ok {
		lastProbe, isTime := cached.(time.Time)
		if isTime && now.Sub(lastProbe) < openAIProbeCacheTTL {
			return false
		}
	}
	s.cache.openAIProbeCache.Store(accountID, now)
	return true
}
// probeOpenAICodexSnapshot issues a minimal streaming request to the
// ChatGPT Codex endpoint solely to read the rate-limit headers of the
// response; the SSE body itself is discarded. On success the parsed
// snapshot is persisted asynchronously via accountRepo.UpdateExtra and the
// update map is returned so the caller can merge it into account.Extra
// immediately. Returns (nil, nil) when the account is unusable or no
// snapshot headers were present.
func (s *AccountUsageService) probeOpenAICodexSnapshot(ctx context.Context, account *Account) (map[string]any, error) {
	if account == nil || !account.IsOAuth() {
		return nil, nil
	}
	accessToken := account.GetOpenAIAccessToken()
	if accessToken == "" {
		return nil, fmt.Errorf("no access token available")
	}
	// Build the smallest valid Codex request (streaming) for the default
	// test model.
	modelID := openaipkg.DefaultTestModel
	payload := createOpenAITestPayload(modelID, true)
	payloadBytes, err := json.Marshal(payload)
	if err != nil {
		return nil, fmt.Errorf("marshal openai probe payload: %w", err)
	}
	reqCtx, cancel := context.WithTimeout(ctx, 15*time.Second)
	defer cancel()
	req, err := http.NewRequestWithContext(reqCtx, http.MethodPost, chatgptCodexURL, bytes.NewReader(payloadBytes))
	if err != nil {
		return nil, fmt.Errorf("create openai probe request: %w", err)
	}
	// Impersonate the Codex CLI so the upstream returns the usual
	// rate-limit headers.
	req.Host = "chatgpt.com"
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+accessToken)
	req.Header.Set("Accept", "text/event-stream")
	req.Header.Set("OpenAI-Beta", "responses=experimental")
	req.Header.Set("Originator", "codex_cli_rs")
	req.Header.Set("Version", codexCLIVersion)
	req.Header.Set("User-Agent", codexCLIUserAgent)
	// Prefer the account's cached fingerprint UA when one is available.
	if s.identityCache != nil {
		if fp, fpErr := s.identityCache.GetFingerprint(reqCtx, account.ID); fpErr == nil && fp != nil && strings.TrimSpace(fp.UserAgent) != "" {
			req.Header.Set("User-Agent", strings.TrimSpace(fp.UserAgent))
		}
	}
	if chatgptAccountID := account.GetChatGPTAccountID(); chatgptAccountID != "" {
		req.Header.Set("chatgpt-account-id", chatgptAccountID)
	}
	// Route through the account's configured proxy, if any.
	proxyURL := ""
	if account.ProxyID != nil && account.Proxy != nil {
		proxyURL = account.Proxy.URL()
	}
	client, err := httppool.GetClient(httppool.Options{
		ProxyURL:              proxyURL,
		Timeout:               15 * time.Second,
		ResponseHeaderTimeout: 10 * time.Second,
	})
	if err != nil {
		return nil, fmt.Errorf("build openai probe client: %w", err)
	}
	resp, err := client.Do(req)
	if err != nil {
		return nil, fmt.Errorf("openai codex probe request failed: %w", err)
	}
	// Close without draining: only the headers matter here and the SSE
	// stream may be long-lived, so aborting the body early is deliberate.
	defer func() { _ = resp.Body.Close() }()
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return nil, fmt.Errorf("openai codex probe returned status %d", resp.StatusCode)
	}
	if snapshot := ParseCodexRateLimitHeaders(resp.Header); snapshot != nil {
		updates := buildCodexUsageExtraUpdates(snapshot, time.Now())
		if len(updates) > 0 {
			// Persist in the background so a slow DB write cannot delay the
			// usage query; a fresh context outlives the request context.
			go func(accountID int64, updates map[string]any) {
				updateCtx, updateCancel := context.WithTimeout(context.Background(), 5*time.Second)
				defer updateCancel()
				_ = s.accountRepo.UpdateExtra(updateCtx, accountID, updates)
			}(account.ID, updates)
			return updates, nil
		}
	}
	return nil, nil
}
// mergeAccountExtra copies every key/value pair from updates into the
// account's Extra map, allocating the map first when needed. A nil account
// or an empty update set is a no-op.
func mergeAccountExtra(account *Account, updates map[string]any) {
	if account == nil || len(updates) == 0 {
		return
	}
	if account.Extra == nil {
		account.Extra = make(map[string]any, len(updates))
	}
	for key, value := range updates {
		account.Extra[key] = value
	}
}
func (s *AccountUsageService) getGeminiUsage(ctx context.Context, account *Account) (*UsageInfo, error) { func (s *AccountUsageService) getGeminiUsage(ctx context.Context, account *Account) (*UsageInfo, error) {
now := time.Now() now := time.Now()
usage := &UsageInfo{ usage := &UsageInfo{
@@ -519,6 +689,72 @@ func windowStatsFromAccountStats(stats *usagestats.AccountStats) *WindowStats {
} }
} }
// hasMeaningfulWindowStats reports whether the stats carry any non-zero
// request, token, or cost figure worth rendering.
func hasMeaningfulWindowStats(stats *WindowStats) bool {
	if stats == nil {
		return false
	}
	switch {
	case stats.Requests > 0, stats.Tokens > 0:
		return true
	case stats.Cost > 0, stats.StandardCost > 0, stats.UserCost > 0:
		return true
	default:
		return false
	}
}
// buildCodexUsageProgressFromExtra reconstructs a UsageProgress for the
// "5h" or "7d" Codex window from the snapshot keys stored in account
// Extra. It returns nil for an unknown window or when no used-percent
// value has been recorded.
//
// Reset time resolution order:
//  1. the absolute codex_<win>_reset_at timestamp, when parseable;
//  2. codex_<win>_reset_after_seconds relative to the snapshot's
//     codex_usage_updated_at (falling back to now).
//
// Fixes: RemainingSeconds is now measured against the injected `now`
// (previously time.Until, i.e. the wall clock, which made results
// non-deterministic despite the `now` parameter), and the clamp logic is
// deduplicated.
func buildCodexUsageProgressFromExtra(extra map[string]any, window string, now time.Time) *UsageProgress {
	if len(extra) == 0 {
		return nil
	}
	var usedPercentKey, resetAfterKey, resetAtKey string
	switch window {
	case "5h":
		usedPercentKey = "codex_5h_used_percent"
		resetAfterKey = "codex_5h_reset_after_seconds"
		resetAtKey = "codex_5h_reset_at"
	case "7d":
		usedPercentKey = "codex_7d_used_percent"
		resetAfterKey = "codex_7d_reset_after_seconds"
		resetAtKey = "codex_7d_reset_at"
	default:
		return nil
	}
	usedRaw, ok := extra[usedPercentKey]
	if !ok {
		return nil
	}
	progress := &UsageProgress{Utilization: parseExtraFloat64(usedRaw)}
	// applyResetAt fills ResetsAt plus a non-negative RemainingSeconds,
	// measured against the injected clock for deterministic results.
	applyResetAt := func(resetAt time.Time) {
		progress.ResetsAt = &resetAt
		remaining := int(resetAt.Sub(now).Seconds())
		if remaining < 0 {
			remaining = 0
		}
		progress.RemainingSeconds = remaining
	}
	if resetAtRaw, ok := extra[resetAtKey]; ok {
		if resetAt, err := parseTime(fmt.Sprint(resetAtRaw)); err == nil {
			applyResetAt(resetAt)
		}
	}
	if progress.ResetsAt == nil {
		if resetAfterSeconds := parseExtraInt(extra[resetAfterKey]); resetAfterSeconds > 0 {
			base := now
			if updatedAtRaw, ok := extra["codex_usage_updated_at"]; ok {
				if updatedAt, err := parseTime(fmt.Sprint(updatedAtRaw)); err == nil {
					base = updatedAt
				}
			}
			applyResetAt(base.Add(time.Duration(resetAfterSeconds) * time.Second))
		}
	}
	return progress
}
func (s *AccountUsageService) GetAccountUsageStats(ctx context.Context, accountID int64, startTime, endTime time.Time) (*usagestats.AccountUsageStatsResponse, error) { func (s *AccountUsageService) GetAccountUsageStats(ctx context.Context, accountID int64, startTime, endTime time.Time) (*usagestats.AccountUsageStatsResponse, error) {
stats, err := s.usageLogRepo.GetAccountUsageStats(ctx, accountID, startTime, endTime) stats, err := s.usageLogRepo.GetAccountUsageStats(ctx, accountID, startTime, endTime)
if err != nil { if err != nil {

View File

@@ -210,6 +210,7 @@ type OpenAIForwardResult struct {
ReasoningEffort *string ReasoningEffort *string
Stream bool Stream bool
OpenAIWSMode bool OpenAIWSMode bool
ResponseHeaders http.Header
Duration time.Duration Duration time.Duration
FirstTokenMs *int FirstTokenMs *int
} }
@@ -3747,6 +3748,15 @@ func (s *OpenAIGatewayService) updateCodexUsageSnapshot(ctx context.Context, acc
}() }()
} }
// UpdateCodexUsageSnapshotFromHeaders parses Codex rate-limit headers from
// an upstream response and, when a snapshot is present, persists it for
// the given account. Nil headers or an invalid account ID are ignored.
func (s *OpenAIGatewayService) UpdateCodexUsageSnapshotFromHeaders(ctx context.Context, accountID int64, headers http.Header) {
	if headers == nil || accountID <= 0 {
		return
	}
	snapshot := ParseCodexRateLimitHeaders(headers)
	if snapshot == nil {
		return
	}
	s.updateCodexUsageSnapshot(ctx, accountID, snapshot)
}
func getOpenAIReasoningEffortFromReqBody(reqBody map[string]any) (value string, present bool) { func getOpenAIReasoningEffortFromReqBody(reqBody map[string]any) (value string, present bool) {
if reqBody == nil { if reqBody == nil {
return "", false return "", false

View File

@@ -28,6 +28,22 @@ type stubOpenAIAccountRepo struct {
accounts []Account accounts []Account
} }
// snapshotUpdateAccountRepo wraps the stub account repo and records every
// UpdateExtra call on a channel so tests can assert on the persisted
// snapshot payload.
type snapshotUpdateAccountRepo struct {
	stubOpenAIAccountRepo
	// updateExtraCalls receives a copy of each UpdateExtra payload; nil
	// disables recording.
	updateExtraCalls chan map[string]any
}

// UpdateExtra captures a defensive copy of the updates map (the caller may
// reuse it) and publishes it on updateExtraCalls when the channel is set.
// It always reports success.
func (r *snapshotUpdateAccountRepo) UpdateExtra(ctx context.Context, id int64, updates map[string]any) error {
	if r.updateExtraCalls != nil {
		copied := make(map[string]any, len(updates))
		for k, v := range updates {
			copied[k] = v
		}
		r.updateExtraCalls <- copied
	}
	return nil
}
func (r stubOpenAIAccountRepo) GetByID(ctx context.Context, id int64) (*Account, error) { func (r stubOpenAIAccountRepo) GetByID(ctx context.Context, id int64) (*Account, error) {
for i := range r.accounts { for i := range r.accounts {
if r.accounts[i].ID == id { if r.accounts[i].ID == id {
@@ -1248,8 +1264,115 @@ func TestOpenAIValidateUpstreamBaseURLEnabledEnforcesAllowlist(t *testing.T) {
} }
} }
// ==================== P1-08 修复model 替换性能优化测试 ==================== func TestOpenAIUpdateCodexUsageSnapshotFromHeaders(t *testing.T) {
repo := &snapshotUpdateAccountRepo{updateExtraCalls: make(chan map[string]any, 1)}
svc := &OpenAIGatewayService{accountRepo: repo}
headers := http.Header{}
headers.Set("x-codex-primary-used-percent", "12")
headers.Set("x-codex-secondary-used-percent", "34")
headers.Set("x-codex-primary-window-minutes", "300")
headers.Set("x-codex-secondary-window-minutes", "10080")
headers.Set("x-codex-primary-reset-after-seconds", "600")
headers.Set("x-codex-secondary-reset-after-seconds", "86400")
svc.UpdateCodexUsageSnapshotFromHeaders(context.Background(), 123, headers)
select {
case updates := <-repo.updateExtraCalls:
require.Equal(t, 12.0, updates["codex_5h_used_percent"])
require.Equal(t, 34.0, updates["codex_7d_used_percent"])
require.Equal(t, 600, updates["codex_5h_reset_after_seconds"])
require.Equal(t, 86400, updates["codex_7d_reset_after_seconds"])
case <-time.After(2 * time.Second):
t.Fatal("expected UpdateExtra to be called")
}
}
// TestOpenAIResponsesRequestPathSuffix verifies path-suffix extraction for
// /responses routes: exact match, compact variants (with alias and
// trailing slash), nested suffixes, and unrelated paths.
func TestOpenAIResponsesRequestPathSuffix(t *testing.T) {
	gin.SetMode(gin.TestMode)
	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	tests := []struct {
		name string
		path string
		want string
	}{
		{name: "exact v1 responses", path: "/v1/responses", want: ""},
		{name: "compact v1 responses", path: "/v1/responses/compact", want: "/compact"},
		{name: "compact alias responses", path: "/responses/compact/", want: "/compact"},
		{name: "nested suffix", path: "/openai/v1/responses/compact/detail", want: "/compact/detail"},
		{name: "unrelated path", path: "/v1/chat/completions", want: ""},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// The gin context is shared; only the request is swapped per case.
			c.Request = httptest.NewRequest(http.MethodPost, tt.path, nil)
			require.Equal(t, tt.want, openAIResponsesRequestPathSuffix(c))
		})
	}
}
// TestOpenAIBuildUpstreamRequestOpenAIPassthroughPreservesCompactPath
// checks that the OAuth passthrough request builder appends the "/compact"
// suffix to the Codex URL and sets the expected JSON/CLI headers.
func TestOpenAIBuildUpstreamRequestOpenAIPassthroughPreservesCompactPath(t *testing.T) {
	gin.SetMode(gin.TestMode)
	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	c.Request = httptest.NewRequest(http.MethodPost, "/v1/responses/compact", bytes.NewReader([]byte(`{"model":"gpt-5"}`)))
	svc := &OpenAIGatewayService{}
	account := &Account{Type: AccountTypeOAuth}
	req, err := svc.buildUpstreamRequestOpenAIPassthrough(c.Request.Context(), c, account, []byte(`{"model":"gpt-5"}`), "token")
	require.NoError(t, err)
	require.Equal(t, chatgptCodexURL+"/compact", req.URL.String())
	require.Equal(t, "application/json", req.Header.Get("Accept"))
	require.Equal(t, codexCLIVersion, req.Header.Get("Version"))
	require.NotEmpty(t, req.Header.Get("Session_Id"))
}
// TestOpenAIBuildUpstreamRequestCompactForcesJSONAcceptForOAuth checks
// that the generic request builder, in compact mode for an OAuth account,
// targets the Codex "/compact" URL and forces a JSON Accept header.
func TestOpenAIBuildUpstreamRequestCompactForcesJSONAcceptForOAuth(t *testing.T) {
	gin.SetMode(gin.TestMode)
	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	c.Request = httptest.NewRequest(http.MethodPost, "/v1/responses/compact", bytes.NewReader([]byte(`{"model":"gpt-5"}`)))
	svc := &OpenAIGatewayService{}
	account := &Account{
		Type:        AccountTypeOAuth,
		Credentials: map[string]any{"chatgpt_account_id": "chatgpt-acc"},
	}
	req, err := svc.buildUpstreamRequest(c.Request.Context(), c, account, []byte(`{"model":"gpt-5"}`), "token", false, "", true)
	require.NoError(t, err)
	require.Equal(t, chatgptCodexURL+"/compact", req.URL.String())
	require.Equal(t, "application/json", req.Header.Get("Accept"))
	require.Equal(t, codexCLIVersion, req.Header.Get("Version"))
	require.NotEmpty(t, req.Header.Get("Session_Id"))
}
// TestOpenAIBuildUpstreamRequestPreservesCompactPathForAPIKeyBaseURL
// checks that for an API-key account with a custom base_url (allowlist
// disabled) the "/responses/compact" path is appended to that base URL.
func TestOpenAIBuildUpstreamRequestPreservesCompactPathForAPIKeyBaseURL(t *testing.T) {
	gin.SetMode(gin.TestMode)
	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	c.Request = httptest.NewRequest(http.MethodPost, "/responses/compact", bytes.NewReader([]byte(`{"model":"gpt-5"}`)))
	svc := &OpenAIGatewayService{cfg: &config.Config{
		Security: config.SecurityConfig{
			URLAllowlist: config.URLAllowlistConfig{Enabled: false},
		},
	}}
	account := &Account{
		Type:        AccountTypeAPIKey,
		Platform:    PlatformOpenAI,
		Credentials: map[string]any{"base_url": "https://example.com/v1"},
	}
	req, err := svc.buildUpstreamRequest(c.Request.Context(), c, account, []byte(`{"model":"gpt-5"}`), "token", false, "", false)
	require.NoError(t, err)
	require.Equal(t, "https://example.com/v1/responses/compact", req.URL.String())
}
// ==================== P1-08 修复model 替换性能优化测试 =============
func TestReplaceModelInSSELine(t *testing.T) { func TestReplaceModelInSSELine(t *testing.T) {
svc := &OpenAIGatewayService{} svc := &OpenAIGatewayService{}

View File

@@ -2309,6 +2309,7 @@ func (s *OpenAIGatewayService) forwardOpenAIWSV2(
ReasoningEffort: extractOpenAIReasoningEffort(reqBody, originalModel), ReasoningEffort: extractOpenAIReasoningEffort(reqBody, originalModel),
Stream: reqStream, Stream: reqStream,
OpenAIWSMode: true, OpenAIWSMode: true,
ResponseHeaders: lease.HandshakeHeaders(),
Duration: time.Since(startTime), Duration: time.Since(startTime),
FirstTokenMs: firstTokenMs, FirstTokenMs: firstTokenMs,
}, nil }, nil
@@ -2919,6 +2920,7 @@ func (s *OpenAIGatewayService) ProxyResponsesWebSocketFromClient(
ReasoningEffort: extractOpenAIReasoningEffortFromBody(payload, originalModel), ReasoningEffort: extractOpenAIReasoningEffortFromBody(payload, originalModel),
Stream: reqStream, Stream: reqStream,
OpenAIWSMode: true, OpenAIWSMode: true,
ResponseHeaders: lease.HandshakeHeaders(),
Duration: time.Since(turnStart), Duration: time.Since(turnStart),
FirstTokenMs: firstTokenMs, FirstTokenMs: firstTokenMs,
}, nil }, nil

View File

@@ -126,6 +126,13 @@ func (l *openAIWSConnLease) HandshakeHeader(name string) string {
return l.conn.handshakeHeader(name) return l.conn.handshakeHeader(name)
} }
// HandshakeHeaders returns a copy of the upstream websocket handshake
// response headers, or nil when the lease has no underlying connection.
// Cloning keeps callers from mutating the connection's cached headers.
func (l *openAIWSConnLease) HandshakeHeaders() http.Header {
	if l == nil || l.conn == nil {
		return nil
	}
	return cloneHeader(l.conn.handshakeHeaders)
}
func (l *openAIWSConnLease) IsPrewarmed() bool { func (l *openAIWSConnLease) IsPrewarmed() bool {
if l == nil || l.conn == nil { if l == nil || l.conn == nil {
return false return false

View File

@@ -180,6 +180,7 @@ func (s *OpenAIGatewayService) proxyResponsesWebSocketV2Passthrough(
Model: turn.RequestModel, Model: turn.RequestModel,
Stream: true, Stream: true,
OpenAIWSMode: true, OpenAIWSMode: true,
ResponseHeaders: cloneHeader(handshakeHeaders),
Duration: turn.Duration, Duration: turn.Duration,
FirstTokenMs: turn.FirstTokenMs, FirstTokenMs: turn.FirstTokenMs,
} }
@@ -226,6 +227,7 @@ func (s *OpenAIGatewayService) proxyResponsesWebSocketV2Passthrough(
Model: relayResult.RequestModel, Model: relayResult.RequestModel,
Stream: true, Stream: true,
OpenAIWSMode: true, OpenAIWSMode: true,
ResponseHeaders: cloneHeader(handshakeHeaders),
Duration: relayResult.Duration, Duration: relayResult.Duration,
FirstTokenMs: relayResult.FirstTokenMs, FirstTokenMs: relayResult.FirstTokenMs,
} }

View File

@@ -90,6 +90,36 @@
color="emerald" color="emerald"
/> />
</div> </div>
<div v-else-if="loading" class="space-y-1.5">
<div class="flex items-center gap-1">
<div class="h-3 w-[32px] animate-pulse rounded bg-gray-200 dark:bg-gray-700"></div>
<div class="h-1.5 w-8 animate-pulse rounded-full bg-gray-200 dark:bg-gray-700"></div>
<div class="h-3 w-[32px] animate-pulse rounded bg-gray-200 dark:bg-gray-700"></div>
</div>
<div class="flex items-center gap-1">
<div class="h-3 w-[32px] animate-pulse rounded bg-gray-200 dark:bg-gray-700"></div>
<div class="h-1.5 w-8 animate-pulse rounded-full bg-gray-200 dark:bg-gray-700"></div>
<div class="h-3 w-[32px] animate-pulse rounded bg-gray-200 dark:bg-gray-700"></div>
</div>
</div>
<div v-else-if="hasOpenAIUsageFallback" class="space-y-1">
<UsageProgressBar
v-if="usageInfo?.five_hour"
label="5h"
:utilization="usageInfo.five_hour.utilization"
:resets-at="usageInfo.five_hour.resets_at"
:window-stats="usageInfo.five_hour.window_stats"
color="indigo"
/>
<UsageProgressBar
v-if="usageInfo?.seven_day"
label="7d"
:utilization="usageInfo.seven_day.utilization"
:resets-at="usageInfo.seven_day.resets_at"
:window-stats="usageInfo.seven_day.window_stats"
color="emerald"
/>
</div>
<div v-else class="text-xs text-gray-400">-</div> <div v-else class="text-xs text-gray-400">-</div>
</template> </template>
@@ -313,6 +343,9 @@ const shouldFetchUsage = computed(() => {
if (props.account.platform === 'antigravity') { if (props.account.platform === 'antigravity') {
return props.account.type === 'oauth' return props.account.type === 'oauth'
} }
if (props.account.platform === 'openai') {
return props.account.type === 'oauth'
}
return false return false
}) })
@@ -335,6 +368,11 @@ const hasCodexUsage = computed(() => {
return codex5hWindow.value.usedPercent !== null || codex7dWindow.value.usedPercent !== null return codex5hWindow.value.usedPercent !== null || codex7dWindow.value.usedPercent !== null
}) })
// Show the usage-API 5h/7d windows for OpenAI OAuth accounts when at least
// one window payload came back (fallback when no codex snapshot exists).
const hasOpenAIUsageFallback = computed(() => {
  const { platform, type } = props.account
  if (platform !== 'openai' || type !== 'oauth') return false
  return Boolean(usageInfo.value?.five_hour) || Boolean(usageInfo.value?.seven_day)
})
const codex5hUsedPercent = computed(() => codex5hWindow.value.usedPercent) const codex5hUsedPercent = computed(() => codex5hWindow.value.usedPercent)
const codex5hResetAt = computed(() => codex5hWindow.value.resetAt) const codex5hResetAt = computed(() => codex5hWindow.value.resetAt)
const codex7dUsedPercent = computed(() => codex7dWindow.value.usedPercent) const codex7dUsedPercent = computed(() => codex7dWindow.value.usedPercent)

View File

@@ -67,4 +67,59 @@ describe('AccountUsageCell', () => {
expect(wrapper.text()).toContain('admin.accounts.usageWindow.gemini3Image|70|2026-03-01T09:00:00Z') expect(wrapper.text()).toContain('admin.accounts.usageWindow.gemini3Image|70|2026-03-01T09:00:00Z')
}) })
// OpenAI OAuth accounts with an empty codex snapshot (extra: {}) should
// fall back to rendering the usage endpoint's 5h/7d windows.
it('OpenAI OAuth 在无 codex 快照时会回退显示 usage 接口窗口', async () => {
  // Both windows report zero utilization but carry meaningful window stats.
  getUsage.mockResolvedValue({
    five_hour: {
      utilization: 0,
      resets_at: null,
      remaining_seconds: 0,
      window_stats: {
        requests: 2,
        tokens: 27700,
        cost: 0.06,
        standard_cost: 0.06,
        user_cost: 0.06
      }
    },
    seven_day: {
      utilization: 0,
      resets_at: null,
      remaining_seconds: 0,
      window_stats: {
        requests: 2,
        tokens: 27700,
        cost: 0.06,
        standard_cost: 0.06,
        user_cost: 0.06
      }
    }
  })
  const wrapper = mount(AccountUsageCell, {
    props: {
      account: {
        id: 2002,
        platform: 'openai',
        type: 'oauth',
        extra: {}
      } as any
    },
    global: {
      stubs: {
        // Stub renders "label|utilization|tokens" so assertions can match text.
        UsageProgressBar: {
          props: ['label', 'utilization', 'resetsAt', 'windowStats', 'color'],
          template: '<div class="usage-bar">{{ label }}|{{ utilization }}|{{ windowStats?.tokens }}</div>'
        },
        AccountQuotaInfo: true
      }
    }
  })
  await flushPromises()
  expect(getUsage).toHaveBeenCalledWith(2002)
  expect(wrapper.text()).toContain('5h|0|27700')
  expect(wrapper.text()).toContain('7d|0|27700')
})
}) })