Merge pull request #2254 from clcc2019/main

refactor: streamline usage reporting by consolidating record publishing
This commit is contained in:
Luis Pater
2026-03-24 00:39:31 +08:00
committed by GitHub
5 changed files with 163 additions and 24 deletions

View File

@@ -73,17 +73,7 @@ func (r *usageReporter) publishWithOutcome(ctx context.Context, detail usage.Det
return
}
r.once.Do(func() {
usage.PublishRecord(ctx, usage.Record{
Provider: r.provider,
Model: r.model,
Source: r.source,
APIKey: r.apiKey,
AuthID: r.authID,
AuthIndex: r.authIndex,
RequestedAt: r.requestedAt,
Failed: failed,
Detail: detail,
})
usage.PublishRecord(ctx, r.buildRecord(detail, failed))
})
}
@@ -96,20 +86,39 @@ func (r *usageReporter) ensurePublished(ctx context.Context) {
return
}
r.once.Do(func() {
usage.PublishRecord(ctx, usage.Record{
Provider: r.provider,
Model: r.model,
Source: r.source,
APIKey: r.apiKey,
AuthID: r.authID,
AuthIndex: r.authIndex,
RequestedAt: r.requestedAt,
Failed: false,
Detail: usage.Detail{},
})
usage.PublishRecord(ctx, r.buildRecord(usage.Detail{}, false))
})
}
// buildRecord assembles a usage.Record from the reporter's captured request
// metadata together with the supplied token detail and failure flag. A nil
// receiver yields a record carrying only the detail and failed values, with
// every other field left at its zero value.
func (r *usageReporter) buildRecord(detail usage.Detail, failed bool) usage.Record {
	record := usage.Record{Detail: detail, Failed: failed}
	if r == nil {
		return record
	}
	record.Provider = r.provider
	record.Model = r.model
	record.Source = r.source
	record.APIKey = r.apiKey
	record.AuthID = r.authID
	record.AuthIndex = r.authIndex
	record.RequestedAt = r.requestedAt
	record.Latency = r.latency()
	return record
}
// latency reports the elapsed time since the request started. It returns
// zero when the reporter is nil, when no start time was recorded, or when
// the computed duration is negative (e.g. the wall clock moved backwards).
func (r *usageReporter) latency() time.Duration {
	if r == nil || r.requestedAt.IsZero() {
		return 0
	}
	if elapsed := time.Since(r.requestedAt); elapsed > 0 {
		return elapsed
	}
	return 0
}
func apiKeyFromContext(ctx context.Context) string {
if ctx == nil {
return ""

View File

@@ -1,6 +1,11 @@
package executor
import "testing"
import (
"testing"
"time"
"github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/usage"
)
func TestParseOpenAIUsageChatCompletions(t *testing.T) {
data := []byte(`{"usage":{"prompt_tokens":1,"completion_tokens":2,"total_tokens":3,"prompt_tokens_details":{"cached_tokens":4},"completion_tokens_details":{"reasoning_tokens":5}}}`)
@@ -41,3 +46,19 @@ func TestParseOpenAIUsageResponses(t *testing.T) {
t.Fatalf("reasoning tokens = %d, want %d", detail.ReasoningTokens, 9)
}
}
// TestUsageReporterBuildRecordIncludesLatency verifies that buildRecord
// derives a positive latency from requestedAt and copies it onto the record.
// The bounds are deliberately loose (1s..3s for a 1.5s-old start time) so
// the test tolerates scheduler jitter.
func TestUsageReporterBuildRecordIncludesLatency(t *testing.T) {
	started := time.Now().Add(-1500 * time.Millisecond)
	reporter := &usageReporter{
		requestedAt: started,
		provider:    "openai",
		model:       "gpt-5.4",
	}

	record := reporter.buildRecord(usage.Detail{TotalTokens: 3}, false)

	if record.Latency < time.Second {
		t.Fatalf("latency = %v, want >= 1s", record.Latency)
	}
	if record.Latency > 3*time.Second {
		t.Fatalf("latency = %v, want <= 3s", record.Latency)
	}
}

View File

@@ -87,9 +87,10 @@ type modelStats struct {
Details []RequestDetail
}
// RequestDetail stores the timestamp and token usage for a single request.
// RequestDetail stores the timestamp, latency, and token usage for a single request.
type RequestDetail struct {
Timestamp time.Time `json:"timestamp"`
LatencyMs int64 `json:"latency_ms"`
Source string `json:"source"`
AuthIndex string `json:"auth_index"`
Tokens TokenStats `json:"tokens"`
@@ -198,6 +199,7 @@ func (s *RequestStatistics) Record(ctx context.Context, record coreusage.Record)
}
s.updateAPIStats(stats, modelName, RequestDetail{
Timestamp: timestamp,
LatencyMs: normaliseLatency(record.Latency),
Source: record.Source,
AuthIndex: record.AuthIndex,
Tokens: detail,
@@ -332,6 +334,9 @@ func (s *RequestStatistics) MergeSnapshot(snapshot StatisticsSnapshot) MergeResu
}
for _, detail := range modelSnapshot.Details {
detail.Tokens = normaliseTokenStats(detail.Tokens)
if detail.LatencyMs < 0 {
detail.LatencyMs = 0
}
if detail.Timestamp.IsZero() {
detail.Timestamp = time.Now()
}
@@ -463,6 +468,13 @@ func normaliseTokenStats(tokens TokenStats) TokenStats {
return tokens
}
func normaliseLatency(latency time.Duration) int64 {
if latency <= 0 {
return 0
}
return latency.Milliseconds()
}
func formatHour(hour int) string {
if hour < 0 {
hour = 0

View File

@@ -0,0 +1,96 @@
package usage
import (
"context"
"testing"
"time"
coreusage "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/usage"
)
// TestRequestStatisticsRecordIncludesLatency ensures Record persists the
// usage record's latency as whole milliseconds on the per-request detail.
func TestRequestStatisticsRecordIncludesLatency(t *testing.T) {
	stats := NewRequestStatistics()

	rec := coreusage.Record{
		APIKey:      "test-key",
		Model:       "gpt-5.4",
		RequestedAt: time.Date(2026, 3, 20, 12, 0, 0, 0, time.UTC),
		Latency:     1500 * time.Millisecond,
		Detail: coreusage.Detail{
			InputTokens:  10,
			OutputTokens: 20,
			TotalTokens:  30,
		},
	}
	stats.Record(context.Background(), rec)

	details := stats.Snapshot().APIs["test-key"].Models["gpt-5.4"].Details
	if len(details) != 1 {
		t.Fatalf("details len = %d, want 1", len(details))
	}
	if details[0].LatencyMs != 1500 {
		t.Fatalf("latency_ms = %d, want 1500", details[0].LatencyMs)
	}
}
// TestRequestStatisticsMergeSnapshotDedupIgnoresLatency checks that two
// snapshots differing only in LatencyMs dedupe to a single request: the
// second merge must be skipped, not double-counted.
func TestRequestStatisticsMergeSnapshotDedupIgnoresLatency(t *testing.T) {
	stats := NewRequestStatistics()
	when := time.Date(2026, 3, 20, 12, 0, 0, 0, time.UTC)

	// snapshotWith builds a one-request snapshot whose only varying part
	// is the recorded latency.
	snapshotWith := func(latencyMs int64) StatisticsSnapshot {
		return StatisticsSnapshot{
			APIs: map[string]APISnapshot{
				"test-key": {
					Models: map[string]ModelSnapshot{
						"gpt-5.4": {
							Details: []RequestDetail{{
								Timestamp: when,
								LatencyMs: latencyMs,
								Source:    "user@example.com",
								AuthIndex: "0",
								Tokens: TokenStats{
									InputTokens:  10,
									OutputTokens: 20,
									TotalTokens:  30,
								},
							}},
						},
					},
				},
			},
		}
	}

	result := stats.MergeSnapshot(snapshotWith(0))
	if result.Added != 1 || result.Skipped != 0 {
		t.Fatalf("first merge = %+v, want added=1 skipped=0", result)
	}
	result = stats.MergeSnapshot(snapshotWith(2500))
	if result.Added != 0 || result.Skipped != 1 {
		t.Fatalf("second merge = %+v, want added=0 skipped=1", result)
	}

	details := stats.Snapshot().APIs["test-key"].Models["gpt-5.4"].Details
	if len(details) != 1 {
		t.Fatalf("details len = %d, want 1", len(details))
	}
}

View File

@@ -17,6 +17,7 @@ type Record struct {
AuthIndex string
Source string
RequestedAt time.Time
Latency time.Duration
Failed bool
Detail Detail
}