package openai

import (
	"bufio"
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"log"
	"net/http"
	"strings"

	"opencatd-open/pkg/tokenizer"
	"opencatd-open/store"

	"github.com/gin-gonic/gin"
)

// ChatProxy forwards a chat-completion request to the upstream selected by the
// key's ApiType (GitHub Marketplace, Azure OpenAI, or an OpenAI-compatible
// endpoint), relays the upstream response to the client (streaming or not),
// and records the caller's prompt/completion token usage and cost.
func ChatProxy(c *gin.Context, chatReq *ChatCompletionRequest) {
	usagelog := store.Tokens{Model: chatReq.Model}

	// "localuser" is set by the auth middleware; a comma-ok assertion avoids a
	// panic if it is missing — the empty string then fails the user lookup.
	tokenVal, _ := c.Get("localuser")
	tokenStr, _ := tokenVal.(string)
	lu, err := store.GetUserByToken(tokenStr)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{
			"error": gin.H{"message": err.Error()},
		})
		return
	}
	usagelog.UserID = int(lu.ID)

	// Flatten the conversation into one string so the prompt token count can
	// be computed locally. Builder avoids quadratic string concatenation.
	var promptB strings.Builder
	for _, msg := range chatReq.Messages {
		switch ct := msg.Content.(type) {
		case string:
			promptB.WriteString("<" + msg.Role + ">: " + ct + "\n")
		case []any:
			// Multimodal content: a list of {"type": "text"|"image_url", ...} parts.
			for _, item := range ct {
				m, ok := item.(map[string]interface{})
				if !ok {
					continue
				}
				switch m["type"] {
				case "text":
					// comma-ok: a malformed part must not panic the handler
					if text, ok := m["text"].(string); ok {
						promptB.WriteString("<" + msg.Role + ">: " + text + "\n")
					}
				case "image_url":
					if urlObj, ok := m["image_url"].(map[string]interface{}); ok {
						if u, ok := urlObj["url"].(string); ok {
							fmt.Printf(" URL: %v\n", u)
							if strings.HasPrefix(u, "http") {
								fmt.Println("网络图片:", u)
							}
						}
					}
				}
			}
		default:
			c.JSON(http.StatusInternalServerError, gin.H{
				"error": gin.H{"message": "Invalid content type"},
			})
			return
		}
	}
	// Count the tool definitions once for the whole request. (Previously this
	// ran inside the message loop, adding one copy per message and inflating
	// the prompt token count.)
	if len(chatReq.Tools) > 0 {
		tooljson, _ := json.Marshal(chatReq.Tools)
		promptB.WriteString(": " + string(tooljson) + "\n")
	}
	prompt := promptB.String()

	// Cap output length for models with a known 16k completion limit.
	switch chatReq.Model {
	case "gpt-4o", "gpt-4o-mini", "chatgpt-4o-latest":
		chatReq.MaxTokens = 16384
	}
	if chatReq.Stream {
		chatReq.StreamOptions = &StreamOption{IncludeUsage: true}
	}
	usagelog.PromptCount = tokenizer.NumTokensFromStr(prompt, chatReq.Model)

	onekey, err := store.SelectKeyCacheByModel(chatReq.Model)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	// Resolve the target URL and auth header first, then build the request
	// exactly once. (Previously req.Header was assigned before the NewRequest
	// error check, panicking on a nil req, and the default branch built and
	// discarded up to four requests.)
	var targetURL, authHeader, authValue string
	switch onekey.ApiType {
	case "github":
		targetURL = Github_Marketplace
		authHeader, authValue = "Authorization", "Bearer "+onekey.Key
	case "azure":
		if onekey.EndPoint != "" {
			targetURL = fmt.Sprintf("%s/openai/deployments/%s/chat/completions?api-version=%s",
				onekey.EndPoint, modelmap(chatReq.Model), AzureApiVersion)
		} else {
			targetURL = fmt.Sprintf("https://%s.openai.azure.com/openai/deployments/%s/chat/completions?api-version=%s",
				onekey.ResourceNmae, modelmap(chatReq.Model), AzureApiVersion)
		}
		authHeader, authValue = "api-key", onekey.Key
	default:
		// Endpoint priority (lowest to highest): default, Cloudflare AI
		// gateway, custom endpoint, per-key endpoint.
		targetURL = OpenAI_Endpoint
		if AIGateWay_Endpoint != "" {
			targetURL = AIGateWay_Endpoint
		}
		if Custom_Endpoint != "" {
			targetURL = Custom_Endpoint
		}
		if onekey.EndPoint != "" {
			targetURL = onekey.EndPoint + c.Request.RequestURI
		}
		authHeader, authValue = "Authorization", "Bearer "+onekey.Key
	}

	req, err := http.NewRequest(c.Request.Method, targetURL, bytes.NewReader(chatReq.ToByteJson()))
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	// Clone so Set below does not mutate the inbound request's header map.
	req.Header = c.Request.Header.Clone()
	req.Header.Set(authHeader, authValue)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	defer resp.Body.Close()

	// result accumulates the assistant output text for completion token counting.
	var result strings.Builder
	if chatReq.Stream {
		for key, values := range resp.Header {
			for _, v := range values {
				c.Writer.Header().Add(key, v)
			}
		}
		c.Writer.WriteHeader(resp.StatusCode)

		// TeeReader forwards every SSE byte to the client while we parse a
		// copy to count completion tokens.
		scanner := bufio.NewScanner(io.TeeReader(resp.Body, c.Writer))
		for scanner.Scan() {
			line := scanner.Bytes()
			if !bytes.HasPrefix(line, []byte("data: ")) {
				continue
			}
			if bytes.HasPrefix(line, []byte("data: [DONE]")) {
				break
			}
			payload := bytes.TrimSpace(bytes.TrimPrefix(line, []byte("data: ")))
			var chunk ChatCompletionStreamResponse
			if err := json.Unmarshal(payload, &chunk); err != nil {
				continue // tolerate non-JSON keep-alive lines
			}
			if len(chunk.Choices) == 0 {
				continue
			}
			delta := chunk.Choices[0].Delta
			if delta.Role != "" {
				result.WriteString("<" + delta.Role + "> ")
			}
			result.WriteString(delta.Content)
			if len(delta.ToolCalls) > 0 {
				if delta.ToolCalls[0].Function.Name != "" {
					result.WriteString("name:" + delta.ToolCalls[0].Function.Name + " arguments:")
				}
				result.WriteString(delta.ToolCalls[0].Function.Arguments)
			}
		}
		// A truncated upstream stream was previously swallowed silently.
		if err := scanner.Err(); err != nil {
			log.Println("reading upstream stream:", err)
		}
	} else {
		body, err := io.ReadAll(resp.Body)
		if err != nil {
			fmt.Println("Error reading response body:", err)
			c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
			return
		}
		var opiResp ChatCompletionResponse
		if err := json.Unmarshal(body, &opiResp); err != nil {
			log.Println("Error parsing JSON:", err)
			c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
			return
		}
		if len(opiResp.Choices) > 0 {
			msg := opiResp.Choices[0].Message
			if msg.Role != "" {
				result.WriteString("<" + msg.Role + "> ")
			}
			result.WriteString(msg.Content)
			if len(msg.ToolCalls) > 0 {
				if msg.ToolCalls[0].Function.Name != "" {
					result.WriteString("name:" + msg.ToolCalls[0].Function.Name + " arguments:")
				}
				result.WriteString(msg.ToolCalls[0].Function.Arguments)
			}
		}
		for k, v := range resp.Header {
			// The body is re-serialized by c.JSON, so the upstream length and
			// encoding headers no longer describe it — copying them corrupts
			// the response.
			if k == "Content-Length" || k == "Content-Encoding" {
				continue
			}
			c.Writer.Header().Set(k, v[0])
		}
		// NOTE(review): upstream errors are also returned with 200 here —
		// consider c.JSON(resp.StatusCode, opiResp) if clients rely on status.
		c.JSON(http.StatusOK, opiResp)
	}

	usagelog.CompletionCount = tokenizer.NumTokensFromStr(result.String(), chatReq.Model)
	usagelog.Cost = fmt.Sprintf("%.6f", tokenizer.Cost(usagelog.Model, usagelog.PromptCount, usagelog.CompletionCount))
	if err := store.Record(&usagelog); err != nil {
		log.Println(err)
	}
	if err := store.SumDaily(usagelog.UserID); err != nil {
		log.Println(err)
	}
}