fix(gateway): 避免SSE delta将缓存创建明细重置为0

- 仅在 delta 中 5m/1h 值大于0时覆盖 usage 明细
- 新增回归测试覆盖 delta 默认 0 不应覆盖 message_start 非零值
- 迁移 054 在删除 legacy 字段前追加一次回填,避免升级实例丢失历史写入

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
yangjianbo
2026-02-16 13:23:12 +08:00
parent 41d0383fb7
commit 6577f2ef03
3 changed files with 50 additions and 2 deletions

View File

@@ -4474,8 +4474,10 @@ func (s *GatewayService) parseSSEUsage(data string, usage *ClaudeUsage) {
// 解析嵌套的 cache_creation 对象中的 5m/1h 明细
cc5m := gjson.Get(data, "usage.cache_creation.ephemeral_5m_input_tokens")
cc1h := gjson.Get(data, "usage.cache_creation.ephemeral_1h_input_tokens")
if cc5m.Exists() || cc1h.Exists() {
if cc5m.Exists() && cc5m.Int() > 0 {
usage.CacheCreation5mTokens = int(cc5m.Int())
}
if cc1h.Exists() && cc1h.Int() > 0 {
usage.CacheCreation1hTokens = int(cc1h.Int())
}
}

View File

@@ -79,6 +79,22 @@ func TestParseSSEUsage_DeltaOverwritesWithNonZero(t *testing.T) {
require.Equal(t, 60, usage.CacheReadInputTokens)
}
// TestParseSSEUsage_DeltaDoesNotResetCacheCreationBreakdown verifies that a
// message_delta event whose cache_creation breakdown fields are zero does not
// wipe out the non-zero 5m/1h values already recorded from message_start.
func TestParseSSEUsage_DeltaDoesNotResetCacheCreationBreakdown(t *testing.T) {
	service := newMinimalGatewayService()
	acc := &ClaudeUsage{}

	// Seed the accumulator with non-zero 5m/1h breakdown via message_start.
	service.parseSSEUsage(`{"type":"message_start","message":{"usage":{"input_tokens":100,"cache_creation":{"ephemeral_5m_input_tokens":30,"ephemeral_1h_input_tokens":70}}}}`, acc)
	require.Equal(t, 30, acc.CacheCreation5mTokens)
	require.Equal(t, 70, acc.CacheCreation1hTokens)

	// A later delta carrying default zeros must leave the seeded values intact
	// while still applying genuinely new fields such as output_tokens.
	service.parseSSEUsage(`{"type":"message_delta","usage":{"output_tokens":12,"cache_creation":{"ephemeral_5m_input_tokens":0,"ephemeral_1h_input_tokens":0}}}`, acc)
	require.Equal(t, 30, acc.CacheCreation5mTokens, "delta 的 0 值不应重置 5m 明细")
	require.Equal(t, 70, acc.CacheCreation1hTokens, "delta 的 0 值不应重置 1h 明细")
	require.Equal(t, 12, acc.OutputTokens)
}
func TestParseSSEUsage_InvalidJSON(t *testing.T) {
svc := newMinimalGatewayService()
usage := &ClaudeUsage{}

View File

@@ -8,7 +8,37 @@
-- cache_creation_1h_tokens (defined in 001_init.sql)
--
-- Migration 009 already copied data from legacy → canonical columns.
-- This migration drops the legacy columns to avoid confusion.
-- But upgraded instances may still have post-009 writes in legacy columns.
-- Backfill once more before dropping to prevent data loss.
-- Final backfill from the legacy column names (no underscore before the
-- unit suffix: cache_creation5m_tokens) into the canonical ones
-- (cache_creation_5m_tokens), immediately before dropping the legacy
-- columns. Guarded so it is a no-op on fresh installs that never had them.
DO $$
BEGIN
-- Only touch the 5m column if the legacy variant still exists.
IF EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = 'public'
AND table_name = 'usage_logs'
AND column_name = 'cache_creation5m_tokens'
) THEN
-- Copy only rows where the canonical column is still zero but the
-- legacy column holds data; never overwrite a non-zero canonical value.
UPDATE usage_logs
SET cache_creation_5m_tokens = cache_creation5m_tokens
WHERE cache_creation_5m_tokens = 0
AND cache_creation5m_tokens <> 0;
END IF;
-- Same guard and copy rule for the 1h counter.
IF EXISTS (
SELECT 1
FROM information_schema.columns
WHERE table_schema = 'public'
AND table_name = 'usage_logs'
AND column_name = 'cache_creation1h_tokens'
) THEN
UPDATE usage_logs
SET cache_creation_1h_tokens = cache_creation1h_tokens
WHERE cache_creation_1h_tokens = 0
AND cache_creation1h_tokens <> 0;
END IF;
END $$;
-- Safe to drop now: any remaining legacy writes were copied above.
-- IF EXISTS keeps this idempotent on databases that never had the columns.
ALTER TABLE usage_logs DROP COLUMN IF EXISTS cache_creation5m_tokens;
ALTER TABLE usage_logs DROP COLUMN IF EXISTS cache_creation1h_tokens;