This commit is contained in:
Sakurasan
2025-04-21 21:50:29 +08:00
parent 51d4651c6c
commit a9ff7e1c94
3 changed files with 17 additions and 17 deletions

View File

@@ -60,22 +60,21 @@ func (h *Proxy) ChatHandler(c *gin.Context) {
 			c.SSEvent("", data)
 		}
 	}
-	go func() {
-		llmusage := llm.GetTokenUsage()
-		cost := tokenizer.Cost(llmusage.Model, llmusage.PromptTokens+llmusage.ToolsTokens, llmusage.CompletionTokens)
-		userid, _ := strconv.ParseInt(c.GetString("user_id"), 10, 64)
-		usage := model.Usage{
-			UserID:           userid,
-			Model:            llmusage.Model,
-			Stream:           chatreq.Stream,
-			PromptTokens:     llmusage.PromptTokens + llmusage.ToolsTokens,
-			CompletionTokens: llmusage.CompletionTokens,
-			TotalTokens:      llmusage.TotalTokens,
-			Cost:             fmt.Sprintf("%f", cost),
-		}
-		h.SendUsage(&usage)
-		defer fmt.Println("cost:", cost, "prompt_tokens:", llmusage.PromptTokens, "completion_tokens:", llmusage.CompletionTokens, "total_tokens:", llmusage.TotalTokens)
-	}()
+	llmusage := llm.GetTokenUsage()
+	cost := tokenizer.Cost(llmusage.Model, llmusage.PromptTokens+llmusage.ToolsTokens, llmusage.CompletionTokens)
+	userid, _ := strconv.ParseInt(c.GetString("user_id"), 10, 64)
+	usage := model.Usage{
+		UserID:           userid,
+		Model:            llmusage.Model,
+		Stream:           chatreq.Stream,
+		PromptTokens:     llmusage.PromptTokens + llmusage.ToolsTokens,
+		CompletionTokens: llmusage.CompletionTokens,
+		TotalTokens:      llmusage.TotalTokens,
+		Cost:             fmt.Sprintf("%f", cost),
+	}
+	h.SendUsage(&usage)
+	defer fmt.Println("cost:", cost, "prompt_tokens:", llmusage.PromptTokens, "completion_tokens:", llmusage.CompletionTokens, "total_tokens:", llmusage.TotalTokens)
 }

View File

@@ -13,7 +13,7 @@ type LLM interface {
 type llm struct {
 	ApiKey   *model.ApiKey
-	Usage    *model.Usage
+	Usage    *TokenUsage
 	tools    any   // TODO
 	Messages []any // TODO
 	llm      LLM

View File

@@ -218,6 +218,7 @@ func (o *OpenAICompatible) StreamChat(ctx context.Context, chatReq llm.ChatReque
 			// case output <- &streamResp:
 			// }
 		}
+		fmt.Println("llm usage:", o.tokenUsage.Model, o.tokenUsage.PromptTokens, o.tokenUsage.CompletionTokens, o.tokenUsage.TotalTokens)
 	}()
 	return output, nil
 }