fix:bug
@@ -1,3 +1,6 @@
+// https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/grounding-search-entry-points?authuser=2&hl=zh-cn
+//
+// https://cloud.google.com/vertex-ai/docs/generative-ai/quotas-genai
 package google
 
 import (
@@ -88,7 +88,7 @@ type ChatCompletionRequest struct {
 	Tools             []Tool       `json:"tools,omitempty"`
 	ParallelToolCalls bool         `json:"parallel_tool_calls,omitempty"`
 	// ToolChoice any `json:"tool_choice,omitempty"`
-	StreamOptions StreamOption `json:"stream_options,omitempty"`
+	StreamOptions *StreamOption `json:"stream_options,omitempty"`
 }
 
 func (c ChatCompletionRequest) ToByteJson() []byte {
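Note: switching StreamOptions from a value to a pointer is presumably about making `omitempty` effective. encoding/json never treats a struct value as "empty", so the old field serialized as "stream_options":{} on every request, even non-streaming ones; a nil pointer is dropped entirely. A minimal standalone sketch of the difference (illustrative only, not code from this repo):

package main

import (
	"encoding/json"
	"fmt"
)

type StreamOption struct {
	IncludeUsage bool `json:"include_usage,omitempty"`
}

// old shape: value field, omitempty has no effect on struct values
type withValue struct {
	StreamOptions StreamOption `json:"stream_options,omitempty"`
}

// new shape: pointer field, a nil pointer is actually omitted
type withPointer struct {
	StreamOptions *StreamOption `json:"stream_options,omitempty"`
}

func main() {
	v, _ := json.Marshal(withValue{})
	p, _ := json.Marshal(withPointer{})
	fmt.Println(string(v)) // {"stream_options":{}}
	fmt.Println(string(p)) // {}
}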
@@ -206,7 +206,7 @@ func ChatProxy(c *gin.Context, chatReq *ChatCompletionRequest) {
 		chatReq.MaxTokens = 16384
 	}
 	if chatReq.Stream {
-		chatReq.StreamOptions.IncludeUsage = true
+		chatReq.StreamOptions = &StreamOption{IncludeUsage: true}
 	}
 
 	usagelog.PromptCount = tokenizer.NumTokensFromStr(prompt, chatReq.Model)
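Note: with StreamOptions now a *StreamOption, the old line `chatReq.StreamOptions.IncludeUsage = true` would dereference a nil pointer whenever the client omitted stream_options, so this hunk assigns a freshly allocated struct instead. A minimal standalone sketch of that failure mode (type and field names reused from the diff, not repo code):

package main

import "fmt"

type StreamOption struct {
	IncludeUsage bool `json:"include_usage,omitempty"`
}

type ChatCompletionRequest struct {
	Stream        bool          `json:"stream,omitempty"`
	StreamOptions *StreamOption `json:"stream_options,omitempty"`
}

func main() {
	req := &ChatCompletionRequest{Stream: true} // decoded request where the client sent no stream_options
	// req.StreamOptions.IncludeUsage = true   // old code path: panics with a nil pointer dereference
	req.StreamOptions = &StreamOption{IncludeUsage: true} // new code path: allocate, then set
	fmt.Println(req.StreamOptions.IncludeUsage)           // true
}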