From 8273507d20e66a7fab70bb788752d7d0f468ecf1 Mon Sep 17 00:00:00 2001
From: Marvin Zhang
Date: Sat, 12 Apr 2025 21:25:57 +0800
Subject: [PATCH] refactor: update LLMResponseUsage struct and ChatMessage
 model for BSON compatibility

- Added BSON tags to the LLMResponseUsage struct fields so they serialize to
  MongoDB with the same snake_case keys as the JSON output.
- Updated the Usage field in the ChatMessage model to exclude it from BSON
  persistence (bson:"-") while keeping it in JSON responses.
- Introduced a GetUsage method on ChatMessage that aggregates token usage
  across its contents, improving usage tracking.
---
 core/entity/llm.go                 |  6 +++---
 core/models/models/chat_message.go | 18 +++++++++++++++++-
 2 files changed, 20 insertions(+), 4 deletions(-)

diff --git a/core/entity/llm.go b/core/entity/llm.go
index d8e95ec6..87302b32 100644
--- a/core/entity/llm.go
+++ b/core/entity/llm.go
@@ -1,7 +1,7 @@
 package entity
 
 type LLMResponseUsage struct {
-	InputTokens  int `json:"input_tokens"`
-	OutputTokens int `json:"output_tokens"`
-	TotalTokens  int `json:"total_tokens"`
+	InputTokens  int `json:"input_tokens" bson:"input_tokens"`
+	OutputTokens int `json:"output_tokens" bson:"output_tokens"`
+	TotalTokens  int `json:"total_tokens" bson:"total_tokens"`
 }
diff --git a/core/models/models/chat_message.go b/core/models/models/chat_message.go
index 8cd37dc0..6253ef5b 100644
--- a/core/models/models/chat_message.go
+++ b/core/models/models/chat_message.go
@@ -17,7 +17,7 @@ type ChatMessage struct {
 	Model    string                   `json:"model" bson:"model" description:"AI model used"`
 	Status   string                   `json:"status" bson:"status" description:"Message status (pending/completed/failed)"`
 	Error    string                   `json:"error,omitempty" bson:"error,omitempty" description:"Error message if failed"`
-	Usage    *entity.LLMResponseUsage `json:"usage,omitempty" bson:"usage,omitempty" description:"Usage"`
+	Usage    *entity.LLMResponseUsage `json:"usage,omitempty" bson:"-" description:"Usage"`
 }
 
 func (m *ChatMessage) GetContent() string {
@@ -56,3 +56,19 @@ func (m *ChatMessage) GetContent() string {
 
 	return result
 }
+
+func (m *ChatMessage) GetUsage() *entity.LLMResponseUsage {
+	if len(m.Contents) == 0 {
+		return nil
+	}
+	var usage entity.LLMResponseUsage
+	for _, content := range m.Contents {
+		if content.Usage != nil {
+			// Accumulate usage across all contents
+			usage.InputTokens += content.Usage.InputTokens
+			usage.OutputTokens += content.Usage.OutputTokens
+			usage.TotalTokens += content.Usage.TotalTokens
+		}
+	}
+	return &usage
+}
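
A note on what the added tags change: without explicit bson tags, the official MongoDB Go driver falls back to the lower-cased struct field names (inputtokens, outputtokens, totaltokens), so the stored keys would not match the snake_case keys used in the JSON API. The sketch below is not part of the patch; it is a minimal illustration of that effect, assuming the go.mongodb.org/mongo-driver/bson package and made-up token counts.

package main

import (
	"encoding/json"
	"fmt"

	"go.mongodb.org/mongo-driver/bson"
)

// Local copy of the patched entity.LLMResponseUsage, for illustration only.
type LLMResponseUsage struct {
	InputTokens  int `json:"input_tokens" bson:"input_tokens"`
	OutputTokens int `json:"output_tokens" bson:"output_tokens"`
	TotalTokens  int `json:"total_tokens" bson:"total_tokens"`
}

func main() {
	usage := LLMResponseUsage{InputTokens: 120, OutputTokens: 34, TotalTokens: 154}

	// JSON keys come from the json tags (unchanged by this patch).
	j, _ := json.Marshal(usage)
	fmt.Println(string(j)) // {"input_tokens":120,"output_tokens":34,"total_tokens":154}

	// With the added bson tags the MongoDB document uses the same snake_case
	// keys; without them the driver would default to "inputtokens" etc.
	b, _ := bson.MarshalExtJSON(usage, false, false)
	fmt.Println(string(b)) // {"input_tokens":120,"output_tokens":34,"total_tokens":154}
}

Conversely, the bson:"-" tag on ChatMessage.Usage tells the driver to skip that field when persisting the document, which is presumably why the patch adds GetUsage to recompute aggregate usage from Contents on demand instead of reading a stored value.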