refactor: streamline usage reporting by consolidating record publishing logic

- Introduced a new method `buildRecord` in `usageReporter` to encapsulate record creation, improving code readability and maintainability.
- Added latency tracking to usage records, ensuring accurate reporting of request latencies.
- Updated tests to validate the inclusion of latency in usage records and ensure proper functionality of the new reporting structure.
This commit is contained in:
clcc2019
2026-03-20 19:44:26 +08:00
parent db63f9b5d6
commit c1bf298216
5 changed files with 163 additions and 24 deletions

View File

@@ -87,9 +87,10 @@ type modelStats struct {
Details []RequestDetail
}
// RequestDetail stores the timestamp and token usage for a single request.
// RequestDetail stores the timestamp, latency, and token usage for a single request.
type RequestDetail struct {
Timestamp time.Time `json:"timestamp"`
LatencyMs int64 `json:"latency_ms"`
Source string `json:"source"`
AuthIndex string `json:"auth_index"`
Tokens TokenStats `json:"tokens"`
@@ -198,6 +199,7 @@ func (s *RequestStatistics) Record(ctx context.Context, record coreusage.Record)
}
s.updateAPIStats(stats, modelName, RequestDetail{
Timestamp: timestamp,
LatencyMs: normaliseLatency(record.Latency),
Source: record.Source,
AuthIndex: record.AuthIndex,
Tokens: detail,
@@ -332,6 +334,9 @@ func (s *RequestStatistics) MergeSnapshot(snapshot StatisticsSnapshot) MergeResu
}
for _, detail := range modelSnapshot.Details {
detail.Tokens = normaliseTokenStats(detail.Tokens)
if detail.LatencyMs < 0 {
detail.LatencyMs = 0
}
if detail.Timestamp.IsZero() {
detail.Timestamp = time.Now()
}
@@ -463,6 +468,13 @@ func normaliseTokenStats(tokens TokenStats) TokenStats {
return tokens
}
func normaliseLatency(latency time.Duration) int64 {
if latency <= 0 {
return 0
}
return latency.Milliseconds()
}
func formatHour(hour int) string {
if hour < 0 {
hour = 0

View File

@@ -0,0 +1,96 @@
package usage
import (
"context"
"testing"
"time"
coreusage "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/usage"
)
// TestRequestStatisticsRecordIncludesLatency verifies that a recorded
// request carries its latency (in milliseconds) through to the snapshot.
func TestRequestStatisticsRecordIncludesLatency(t *testing.T) {
	stats := NewRequestStatistics()
	record := coreusage.Record{
		APIKey:      "test-key",
		Model:       "gpt-5.4",
		RequestedAt: time.Date(2026, 3, 20, 12, 0, 0, 0, time.UTC),
		Latency:     1500 * time.Millisecond,
		Detail: coreusage.Detail{
			InputTokens:  10,
			OutputTokens: 20,
			TotalTokens:  30,
		},
	}
	stats.Record(context.Background(), record)
	details := stats.Snapshot().APIs["test-key"].Models["gpt-5.4"].Details
	if len(details) != 1 {
		t.Fatalf("details len = %d, want 1", len(details))
	}
	if got := details[0].LatencyMs; got != 1500 {
		t.Fatalf("latency_ms = %d, want 1500", got)
	}
}
// TestRequestStatisticsMergeSnapshotDedupIgnoresLatency verifies that
// merge deduplication treats two details as equal even when only their
// LatencyMs values differ: the second merge is skipped, not added.
func TestRequestStatisticsMergeSnapshotDedupIgnoresLatency(t *testing.T) {
	stats := NewRequestStatistics()
	when := time.Date(2026, 3, 20, 12, 0, 0, 0, time.UTC)
	// build produces snapshots identical in every field except latency.
	build := func(latencyMs int64) StatisticsSnapshot {
		return StatisticsSnapshot{
			APIs: map[string]APISnapshot{
				"test-key": {
					Models: map[string]ModelSnapshot{
						"gpt-5.4": {
							Details: []RequestDetail{{
								Timestamp: when,
								LatencyMs: latencyMs,
								Source:    "user@example.com",
								AuthIndex: "0",
								Tokens: TokenStats{
									InputTokens:  10,
									OutputTokens: 20,
									TotalTokens:  30,
								},
							}},
						},
					},
				},
			},
		}
	}
	if got := stats.MergeSnapshot(build(0)); got.Added != 1 || got.Skipped != 0 {
		t.Fatalf("first merge = %+v, want added=1 skipped=0", got)
	}
	if got := stats.MergeSnapshot(build(2500)); got.Added != 0 || got.Skipped != 1 {
		t.Fatalf("second merge = %+v, want added=0 skipped=1", got)
	}
	details := stats.Snapshot().APIs["test-key"].Models["gpt-5.4"].Details
	if len(details) != 1 {
		t.Fatalf("details len = %d, want 1", len(details))
	}
}