Skip to content

Commit c3c258a

Browse files
authored
proxy: fix metrics capture for v1/responses (#586)
Properly parse Anthropic-compatible usage data from streaming responses. Closes #577.
1 parent 29a38fd commit c3c258a

File tree

2 files changed

+36
-2
lines changed

2 files changed

+36
-2
lines changed

proxy/metrics_monitor.go

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -350,6 +350,11 @@ func processStreamingResponse(modelID string, start time.Time, body []byte) (Tok
350350
usage := parsed.Get("usage")
351351
timings := parsed.Get("timings")
352352

353+
// v1/responses format nests usage under response.usage
354+
if !usage.Exists() {
355+
usage = parsed.Get("response.usage")
356+
}
357+
353358
if usage.Exists() || timings.Exists() {
354359
return parseMetrics(modelID, start, usage, timings)
355360
}
@@ -503,9 +508,9 @@ func filterAcceptEncoding(acceptEncoding string) string {
503508
supported := map[string]bool{"gzip": true, "deflate": true}
504509
var filtered []string
505510

506-
for _, part := range strings.Split(acceptEncoding, ",") {
511+
for part := range strings.SplitSeq(acceptEncoding, ",") {
507512
// Parse encoding and optional quality value (e.g., "gzip;q=1.0")
508-
encoding := strings.TrimSpace(strings.Split(part, ";")[0])
513+
encoding, _, _ := strings.Cut(strings.TrimSpace(part), ";")
509514
if supported[strings.ToLower(encoding)] {
510515
filtered = append(filtered, strings.TrimSpace(part))
511516
}

proxy/metrics_monitor_test.go

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -709,6 +709,35 @@ data: [DONE]
709709
assert.Equal(t, 0, metrics[0].OutputTokens)
710710
})
711711

712+
t.Run("v1/responses format with nested response.usage", func(t *testing.T) {
713+
mm := newMetricsMonitor(testLogger, 10, 0)
714+
715+
// v1/responses SSE format: usage is nested under response.usage
716+
responseBody := "event: response.completed\n" +
717+
`data: {"type":"response.completed","response":{"id":"resp_abc","object":"response","created_at":1773416985,"status":"completed","model":"test-model","output":[],"usage":{"input_tokens":17,"output_tokens":23,"total_tokens":40}}}` +
718+
"\n\n"
719+
720+
nextHandler := func(modelID string, w http.ResponseWriter, r *http.Request) error {
721+
w.Header().Set("Content-Type", "text/event-stream")
722+
w.WriteHeader(http.StatusOK)
723+
w.Write([]byte(responseBody))
724+
return nil
725+
}
726+
727+
req := httptest.NewRequest("POST", "/v1/responses", nil)
728+
rec := httptest.NewRecorder()
729+
ginCtx, _ := gin.CreateTestContext(rec)
730+
731+
err := mm.wrapHandler("test-model", ginCtx.Writer, req, nextHandler)
732+
assert.NoError(t, err)
733+
734+
metrics := mm.getMetrics()
735+
assert.Equal(t, 1, len(metrics))
736+
assert.Equal(t, "test-model", metrics[0].Model)
737+
assert.Equal(t, 17, metrics[0].InputTokens)
738+
assert.Equal(t, 23, metrics[0].OutputTokens)
739+
})
740+
712741
t.Run("handles empty streaming response records minimal metrics", func(t *testing.T) {
713742
mm := newMetricsMonitor(testLogger, 10, 0)
714743

0 commit comments

Comments (0)