Squashed commit of feat/claude

Sakurasan
2023-09-16 21:18:11 +08:00
parent 678928cafd
commit 545147abe0
3 changed files with 265 additions and 42 deletions

View File

@@ -10,6 +10,8 @@ curl $AZURE_OPENAI_ENDPOINT/openai/deployments/gpt-35-turbo/chat/completions?api
"messages": [{"role": "user", "content": "你好"}] "messages": [{"role": "user", "content": "你好"}]
}' }'
https://learn.microsoft.com/zh-cn/rest/api/cognitiveservices/azureopenaistable/models/list?tabs=HTTP
curl $AZURE_OPENAI_ENDPOINT/openai/deployments?api-version=2022-12-01 \
-H "Content-Type: application/json" \
-H "api-key: $AZURE_OPENAI_KEY" \

View File

@@ -44,8 +44,15 @@ import (
"net/url" "net/url"
"opencatd-open/store" "opencatd-open/store"
"strings" "strings"
"sync"
"time"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/sashabaranov/go-openai"
)
var (
ClaudeUrl = "https://api.anthropic.com/v1/complete"
)
type MessageModule struct {
@@ -56,11 +63,11 @@ type MessageModule struct {
type CompleteRequest struct {
Model string `json:"model,omitempty"` //*
Prompt string `json:"prompt,omitempty"` //*
-MaxTokensToSample int `json:"max_Tokens_To_Sample,omitempty"` //*
+MaxTokensToSample int `json:"max_tokens_to_sample,omitempty"` //*
-StopSequences string `json:"stop_Sequences,omitempty"`
+StopSequences string `json:"stop_sequences,omitempty"`
Temperature int `json:"temperature,omitempty"`
-TopP int `json:"top_P,omitempty"`
+TopP int `json:"top_p,omitempty"`
-TopK int `json:"top_K,omitempty"`
+TopK int `json:"top_k,omitempty"`
Stream bool `json:"stream,omitempty"`
Metadata struct {
UserId string `json:"user_Id,omitempty"`
@@ -76,11 +83,17 @@ type CompleteResponse struct {
}
func Create() {
-url := "https://api.anthropic.com/v1/complete"
+complet := CompleteRequest{
Model: "claude-2",
Prompt: "Human: Hello, world!\\n\\nAssistant:",
Stream: true,
}
payload := bytes.NewBuffer(nil) // must be a non-nil buffer; encoding into a nil *bytes.Buffer panics
json.NewEncoder(payload).Encode(complet)
// payload := strings.NewReader("{\"model\":\"claude-2\",\"prompt\":\"\\n\\nHuman: Hello, world!\\n\\nAssistant:\",\"max_tokens_to_sample\":256}")
-req, _ := http.NewRequest("POST", url, payload)
+req, _ := http.NewRequest("POST", ClaudeUrl, payload)
req.Header.Add("accept", "application/json") req.Header.Add("accept", "application/json")
req.Header.Add("anthropic-version", "2023-06-01") req.Header.Add("anthropic-version", "2023-06-01")
@@ -90,9 +103,24 @@ func Create() {
res, _ := http.DefaultClient.Do(req)
defer res.Body.Close()
// body, _ := io.ReadAll(res.Body)
// fmt.Println(string(body))
reader := bufio.NewReader(res.Body)
for {
line, err := reader.ReadString('\n')
if err == nil {
if strings.HasPrefix(line, "data:") {
fmt.Println(line)
// var result CompleteResponse
// json.Unmarshal()
} else {
continue
}
} else {
break
}
}
}
func ClaudeProxy(c *gin.Context) {
@@ -200,3 +228,161 @@ func ClaudeProxy(c *gin.Context) {
}
proxy.ServeHTTP(c.Writer, c.Request)
}
func TransReq(chatreq *openai.ChatCompletionRequest) (*bytes.Buffer, error) {
transReq := CompleteRequest{
Model: chatreq.Model,
Temperature: int(chatreq.Temperature),
TopP: int(chatreq.TopP),
Stream: chatreq.Stream,
MaxTokensToSample: chatreq.MaxTokens,
}
if transReq.MaxTokensToSample == 0 {
transReq.MaxTokensToSample = 1000000
}
var prompt string
for _, msg := range chatreq.Messages {
switch msg.Role {
case "system":
prompt += fmt.Sprintf("\n\nHuman:%s", msg.Content)
case "user":
prompt += fmt.Sprintf("\n\nHuman:%s", msg.Content)
case "assistant":
prompt += fmt.Sprintf("\n\nAssistant:%s", msg.Content)
}
}
transReq.Prompt = prompt + "\n\nAssistant:"
var payload = bytes.NewBuffer(nil)
if err := json.NewEncoder(payload).Encode(transReq); err != nil {
return nil, err
}
return payload, nil
}
func TransRsp(c *gin.Context, isStream bool, reader *bufio.Reader) {
if !isStream {
var completersp CompleteResponse
var chatrsp openai.ChatCompletionResponse
json.NewDecoder(reader).Decode(&completersp)
chatrsp.Model = completersp.Model
chatrsp.ID = completersp.LogID
chatrsp.Object = "chat.completion"
chatrsp.Created = time.Now().Unix()
choice := openai.ChatCompletionChoice{
Index: 0,
FinishReason: "stop",
Message: openai.ChatCompletionMessage{
Role: "assistant",
Content: completersp.Completion,
},
}
chatrsp.Choices = append(chatrsp.Choices, choice)
var payload = bytes.NewBuffer(nil) // non-nil buffer to hold the JSON-encoded response
if err := json.NewEncoder(payload).Encode(chatrsp); err != nil {
c.JSON(http.StatusInternalServerError, gin.H{
"error": gin.H{
"message": err.Error(),
},
})
return
}
c.Data(http.StatusOK, "application/json", payload.Bytes())
return
} else {
var (
wg sync.WaitGroup
dataChan = make(chan string)
stopChan = make(chan bool)
)
wg.Add(2)
go func() {
defer wg.Done()
for {
line, err := reader.ReadString('\n')
if err == nil {
if strings.HasPrefix(line, "data: ") {
var result CompleteResponse
json.NewDecoder(strings.NewReader(line[6:])).Decode(&result)
if result.StopReason == "" {
if result.Completion != "" {
chatrsp := openai.ChatCompletionStreamResponse{
ID: result.LogID,
Model: result.Model,
Object: "chat.completion",
Created: time.Now().Unix(),
}
choice := openai.ChatCompletionStreamChoice{
Delta: openai.ChatCompletionStreamChoiceDelta{
Role: "assistant",
Content: result.Completion,
},
FinishReason: "",
}
chatrsp.Choices = append(chatrsp.Choices, choice)
bytedate, _ := json.Marshal(chatrsp)
dataChan <- string(bytedate)
}
} else {
chatrsp := openai.ChatCompletionStreamResponse{
ID: result.LogID,
Model: result.Model,
Object: "chat.completion",
Created: time.Now().Unix(),
}
choice := openai.ChatCompletionStreamChoice{
Delta: openai.ChatCompletionStreamChoiceDelta{
Role: "assistant",
Content: result.Completion,
},
}
choice.FinishReason = openai.FinishReason(TranslatestopReason(result.StopReason))
chatrsp.Choices = append(chatrsp.Choices, choice)
bytedate, _ := json.Marshal(chatrsp)
dataChan <- string(bytedate)
dataChan <- "[DONE]"
break
}
} else {
continue
}
} else {
break
}
}
close(dataChan)
stopChan <- true
close(stopChan)
}()
go func() {
defer wg.Done()
Loop:
for {
select {
case data := <-dataChan:
if data != "" {
c.Writer.WriteString("data: " + data)
c.Writer.WriteString("\n\n")
c.Writer.Flush()
}
case <-stopChan:
break Loop
}
}
}()
wg.Wait()
}
}
// claude -> openai
func TranslatestopReason(reason string) string {
switch reason {
case "stop_sequence":
return "stop"
case "max_tokens":
return "length"
default:
return reason
}
}
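Illustration only, not part of the commit: a self-contained sketch of the Human/Assistant prompt string that TransReq above builds from an OpenAI-style request, using the go-openai types this package already imports. The message contents are made-up examples.

package main

import (
	"fmt"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	req := openai.ChatCompletionRequest{
		Model: "claude-2",
		Messages: []openai.ChatCompletionMessage{
			{Role: "system", Content: "You are a helpful assistant."},
			{Role: "user", Content: "Hello, world!"},
		},
	}

	// Same role mapping as TransReq: system and user turns both become "Human",
	// assistant turns become "Assistant", and the prompt ends with "\n\nAssistant:".
	var prompt string
	for _, m := range req.Messages {
		switch m.Role {
		case "system", "user":
			prompt += fmt.Sprintf("\n\nHuman:%s", m.Content)
		case "assistant":
			prompt += fmt.Sprintf("\n\nAssistant:%s", m.Content)
		}
	}
	prompt += "\n\nAssistant:"

	fmt.Printf("%q\n", prompt)
}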

View File

@@ -16,6 +16,7 @@ import (
"net/http/httputil" "net/http/httputil"
"net/url" "net/url"
"opencatd-open/pkg/azureopenai" "opencatd-open/pkg/azureopenai"
"opencatd-open/pkg/claude"
"opencatd-open/store" "opencatd-open/store"
"os" "os"
"path/filepath" "path/filepath"
@@ -286,7 +287,7 @@ func HandleAddKey(c *gin.Context) {
return
}
k := &store.Key{
-ApiType: "azure_openai",
+ApiType: "azure",
Name: body.Name,
Key: body.Key,
ResourceNmae: keynames[1],
@@ -298,7 +299,7 @@ func HandleAddKey(c *gin.Context) {
}})
return
}
-} else if strings.HasPrefix(body.Name, "anthropic.") {
+} else if strings.HasPrefix(body.Name, "claude.") {
keynames := strings.Split(body.Name, ".")
if len(keynames) < 2 {
c.JSON(http.StatusBadRequest, gin.H{"error": gin.H{
@@ -310,7 +311,8 @@ func HandleAddKey(c *gin.Context) {
body.Endpoint = "https://api.anthropic.com" body.Endpoint = "https://api.anthropic.com"
} }
k := &store.Key{ k := &store.Key{
ApiType: "anthropic", // ApiType: "anthropic",
ApiType: "claude",
Name: body.Name,
Key: body.Key,
ResourceNmae: keynames[1],
@@ -459,6 +461,7 @@ func HandleProy(c *gin.Context) {
chatreq = openai.ChatCompletionRequest{}
chatres = openai.ChatCompletionResponse{}
chatlog store.Tokens
onekey store.Key
pre_prompt string
req *http.Request
err error
@@ -469,6 +472,10 @@ func HandleProy(c *gin.Context) {
localuser = store.IsExistAuthCache(auth[7:])
c.Set("localuser", auth[7:])
}
if c.Request.URL.Path == "/v1/complete" {
claude.ClaudeProxy(c)
return
}
if c.Request.URL.Path == "/v1/audio/transcriptions" { if c.Request.URL.Path == "/v1/audio/transcriptions" {
WhisperProxy(c) WhisperProxy(c)
return return
@@ -481,12 +488,12 @@ func HandleProy(c *gin.Context) {
}})
return
}
-onekey := store.FromKeyCacheRandomItemKey()
if err := c.BindJSON(&chatreq); err != nil { if err := c.BindJSON(&chatreq); err != nil {
c.AbortWithError(http.StatusBadRequest, err) c.AbortWithError(http.StatusBadRequest, err)
return return
} }
chatlog.Model = chatreq.Model chatlog.Model = chatreq.Model
for _, m := range chatreq.Messages { for _, m := range chatreq.Messages {
pre_prompt += m.Content + "\n" pre_prompt += m.Content + "\n"
@@ -498,8 +505,28 @@ func HandleProy(c *gin.Context) {
var body bytes.Buffer
json.NewEncoder(&body).Encode(chatreq)
if strings.HasPrefix(chatreq.Model, "claude-") {
onekey, err = store.SelectKeyCache("claude")
if err != nil {
c.AbortWithError(http.StatusForbidden, err)
return
}
} else {
onekey = store.FromKeyCacheRandomItemKey()
}
// create the API request
switch onekey.ApiType {
case "claude":
payload, _ := claude.TransReq(&chatreq)
buildurl := "https://api.anthropic.com/v1/complete"
req, err = http.NewRequest("POST", buildurl, payload)
req.Header.Add("accept", "application/json")
req.Header.Add("anthropic-version", "2023-06-01")
req.Header.Add("x-api-key", onekey.Key)
req.Header.Add("content-type", "application/json")
case "azure":
fallthrough
case "azure_openai": case "azure_openai":
var buildurl string var buildurl string
var apiVersion = "2023-05-15" var apiVersion = "2023-05-15"
@@ -533,7 +560,7 @@ func HandleProy(c *gin.Context) {
req, err = http.NewRequest(c.Request.Method, baseUrl+c.Request.RequestURI, c.Request.Body)
if err != nil {
log.Println(err)
-c.JSON(http.StatusOK, gin.H{"error": err.Error()})
+c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
req.Header = c.Request.Header
@@ -542,7 +569,7 @@ func HandleProy(c *gin.Context) {
resp, err := client.Do(req)
if err != nil {
log.Println(err)
-c.JSON(http.StatusOK, gin.H{"error": err.Error()})
+c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
defer resp.Body.Close()
@@ -574,6 +601,13 @@ func HandleProy(c *gin.Context) {
reader := bufio.NewReader(resp.Body)
if resp.StatusCode == 200 && localuser {
switch onekey.ApiType {
case "claude":
claude.TransRsp(c, isStream, reader)
return
case "openai", "azure", "azure_openai":
fallthrough
default:
if isStream {
contentCh := fetchResponseContent(c, reader)
var buffer bytes.Buffer
@@ -610,6 +644,7 @@ func HandleProy(c *gin.Context) {
if err := store.SumDaily(chatlog.UserID); err != nil {
log.Println(err)
}
}
}
// return the API response body
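A hedged end-to-end sketch, not included in the commit: with a claude.* key registered, an OpenAI-style client pointed at this proxy should be able to request a claude- model and receive an OpenAI-shaped chat completion, since HandleProy now routes such requests through claude.TransReq and claude.TransRsp. The proxy address and token below are placeholders.

package main

import (
	"context"
	"fmt"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	cfg := openai.DefaultConfig("your-opencatd-token") // placeholder team/user token
	cfg.BaseURL = "http://localhost:80/v1"             // placeholder proxy address
	client := openai.NewClientWithConfig(cfg)

	resp, err := client.CreateChatCompletion(context.Background(), openai.ChatCompletionRequest{
		Model: "claude-2", // the "claude-" prefix selects the claude key/ApiType in HandleProy
		Messages: []openai.ChatCompletionMessage{
			{Role: openai.ChatMessageRoleUser, Content: "Hello, world!"},
		},
	})
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	fmt.Println(resp.Choices[0].Message.Content)
}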