update: package version

Sakurasan
2024-10-21 16:24:58 +08:00
parent 0c99913e91
commit ae3b72c0ff
4 changed files with 168 additions and 92 deletions


@@ -67,7 +67,7 @@ type Tool struct {
 }
 type StreamOption struct {
-	IncludeUsage bool `json:"include_Usage,omitempty"`
+	IncludeUsage bool `json:"include_usage,omitempty"`
 }
 type ChatCompletionRequest struct {
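The tag fix matters because encoding/json uses the tag string verbatim as the wire key, so the old include_Usage spelling would never match the field name an OpenAI-compatible upstream expects. A minimal sketch of the corrected behaviour (StreamOption re-declared locally for illustration only):

package main

import (
	"encoding/json"
	"fmt"
)

// StreamOption re-declared here purely for illustration.
type StreamOption struct {
	IncludeUsage bool `json:"include_usage,omitempty"`
}

func main() {
	b, _ := json.Marshal(StreamOption{IncludeUsage: true})
	fmt.Println(string(b)) // {"include_usage":true}
}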
@@ -77,7 +77,7 @@ type ChatCompletionRequest struct {
 	Temperature      float64  `json:"temperature,omitempty"`
 	TopP             float64  `json:"top_p,omitempty"`
 	N                int      `json:"n,omitempty"`
-	Stream           bool     `json:"stream,omitempty"`
+	Stream           bool     `json:"stream"`
 	Stop             []string `json:"stop,omitempty"`
 	PresencePenalty  float64  `json:"presence_penalty,omitempty"`
 	FrequencyPenalty float64  `json:"frequency_penalty,omitempty"`
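Dropping omitempty from Stream means a false value is no longer stripped from the marshalled request, so the upstream always receives an explicit "stream" field. A small sketch of the before/after difference (stand-in structs, not the project's real types):

package main

import (
	"encoding/json"
	"fmt"
)

// Minimal stand-ins for the two tag variants; illustration only.
type oldStream struct {
	Stream bool `json:"stream,omitempty"`
}
type newStream struct {
	Stream bool `json:"stream"`
}

func main() {
	before, _ := json.Marshal(oldStream{Stream: false})
	after, _ := json.Marshal(newStream{Stream: false})
	fmt.Println(string(before)) // {}               -- field dropped when false
	fmt.Println(string(after))  // {"stream":false} -- always present
}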
@@ -205,9 +205,9 @@ func ChatProxy(c *gin.Context, chatReq *ChatCompletionRequest) {
 	case "gpt-4o", "gpt-4o-mini", "chatgpt-4o-latest":
		chatReq.MaxTokens = 16384
 	}
-	// if chatReq.Stream == true {
-	// 	chatReq.StreamOptions.IncludeUsage = true
-	// }
+	if chatReq.Stream {
+		chatReq.StreamOptions.IncludeUsage = true
+	}
 	usagelog.PromptCount = tokenizer.NumTokensFromStr(prompt, chatReq.Model)
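With the guard uncommented, every streaming request now asks the upstream for a usage block. Assuming StreamOptions is a value field serialized as stream_options (its declaration is not part of this diff), the proxied request body would look roughly like the output of this sketch:

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down, hypothetical mirror of the fields this commit touches;
// the real ChatCompletionRequest and StreamOption live in the proxy package.
type streamOption struct {
	IncludeUsage bool `json:"include_usage,omitempty"`
}

type chatRequest struct {
	Model         string       `json:"model"`
	Stream        bool         `json:"stream"`
	StreamOptions streamOption `json:"stream_options"` // tag assumed; not shown in this diff
}

func main() {
	req := chatRequest{Model: "gpt-4o-mini", Stream: true}
	if req.Stream { // same guard as the uncommented block above
		req.StreamOptions.IncludeUsage = true
	}
	b, _ := json.Marshal(req)
	fmt.Println(string(b))
	// {"model":"gpt-4o-mini","stream":true,"stream_options":{"include_usage":true}}
}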