c菌
2023-08-19 23:03:36 +08:00
parent 678928cafd
commit 173436f2a3
3 changed files with 185 additions and 3 deletions

View File

@@ -10,6 +10,8 @@ curl $AZURE_OPENAI_ENDPOINT/openai/deployments/gpt-35-turbo/chat/completions?api
"messages": [{"role": "user", "content": "你好"}]
}'
https://learn.microsoft.com/zh-cn/rest/api/cognitiveservices/azureopenaistable/models/list?tabs=HTTP
curl $AZURE_OPENAI_ENDPOINT/openai/deployments?api-version=2022-12-01 \
-H "Content-Type: application/json" \
-H "api-key: $AZURE_OPENAI_KEY" \

View File

@@ -46,6 +46,11 @@ import (
"strings"
"github.com/gin-gonic/gin"
"github.com/sashabaranov/go-openai"
)
var (
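// ClaudeUrl is the Anthropic text-completion endpoint (/v1/complete).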
ClaudeUrl = "https://api.anthropic.com/v1/complete"
)
type MessageModule struct {
@@ -76,11 +81,10 @@ type CompleteResponse struct {
}
func Create() {
url := "https://api.anthropic.com/v1/complete"
payload := strings.NewReader("{\"model\":\"claude-2\",\"prompt\":\"\\n\\nHuman: Hello, world!\\n\\nAssistant:\",\"max_tokens_to_sample\":256}")
req, _ := http.NewRequest("POST", url, payload)
req, _ := http.NewRequest("POST", ClaudeUrl, payload)
req.Header.Add("accept", "application/json")
req.Header.Add("anthropic-version", "2023-06-01")
@@ -200,3 +204,168 @@ func ClaudeProxy(c *gin.Context) {
}
proxy.ServeHTTP(c.Writer, c.Request)
}
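// Translate converts an OpenAI-style chat completion request into an Anthropic
// /v1/complete call; streaming responses are relayed back to the client as
// OpenAI-compatible SSE chunks (non-streaming translation is not implemented yet).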
func Translate(c *gin.Context, chatreq *openai.ChatCompletionRequest) {
transReq := CompleteRequest{
Model: chatreq.Model,
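// CompleteRequest stores these as int, so fractional OpenAI values (e.g. 0.7) are truncated here.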
Temperature: int(chatreq.Temperature),
TopP: int(chatreq.TopP),
Stream: chatreq.Stream,
MaxTokensToSample: chatreq.MaxTokens,
}
var prompt string
for _, msg := range chatreq.Messages {
switch msg.Role {
case "system":
// Claude's text-completion API has no system role; prepend system text before the first Human turn.
prompt += fmt.Sprintf("%s\n", msg.Content)
case "user":
// The /v1/complete prompt must use alternating "\n\nHuman:" and "\n\nAssistant:" turns.
prompt += fmt.Sprintf("\n\nHuman: %s", msg.Content)
case "assistant":
prompt += fmt.Sprintf("\n\nAssistant: %s", msg.Content)
}
}
transReq.Prompt = prompt + "\n\nAssistant:"
key, err := store.SelectKeyCache("anthropic")
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{
"error": gin.H{
"message": err.Error(),
},
})
return
}
var payload = bytes.NewBuffer(nil)
if err := json.NewEncoder(payload).Encode(transReq); err != nil {
c.JSON(http.StatusInternalServerError, gin.H{
"error": gin.H{
"message": err.Error(),
},
})
return
}
req, _ := http.NewRequest("POST", ClaudeUrl, payload)
req.Header.Add("accept", "application/json")
req.Header.Add("anthropic-version", "2023-06-01")
req.Header.Add("x-api-key", key.Key)
req.Header.Add("content-type", "application/json")
resp, err := http.DefaultClient.Do(req)
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{
"error": gin.H{
"message": err.Error(),
},
})
return
}
defer resp.Body.Close()
for name, values := range resp.Header {
// Skip length/encoding headers: the translated body no longer matches the upstream body.
if name == "Content-Length" || name == "Content-Encoding" || name == "Transfer-Encoding" {
continue
}
for _, value := range values {
c.Writer.Header().Add(name, value)
}
}
head := map[string]string{
"Cache-Control": "no-store",
"access-control-allow-origin": "*",
"access-control-allow-credentials": "true",
}
for k, v := range head {
// Header.Get canonicalizes the key, so this works regardless of the upstream header casing.
if c.Writer.Header().Get(k) == "" {
c.Writer.Header().Set(k, v)
}
}
// Delete these from the headers that are actually sent to the client; deleting from
// resp.Header after the copy above would have no effect.
c.Writer.Header().Del("Content-Security-Policy")
c.Writer.Header().Del("Content-Security-Policy-Report-Only")
c.Writer.Header().Del("Clear-Site-Data")
c.Writer.WriteHeader(resp.StatusCode)
writer := bufio.NewWriter(c.Writer)
defer writer.Flush()
reader := bufio.NewReader(resp.Body)
if resp.StatusCode == 200 {
if chatreq.Stream {
contentCh := fetchResponseContent(c, reader)
for content := range contentCh {
writer.WriteString(content)
writer.Flush()
// Flush the underlying gin writer so each SSE chunk reaches the client immediately.
c.Writer.Flush()
}
// Terminate the stream the way OpenAI-compatible clients expect.
writer.WriteString("data: [DONE]\n\n")
writer.Flush()
c.Writer.Flush()
} else {
// TODO: non-streaming Claude responses are not translated to the OpenAI
// chat-completion format yet; the upstream body is currently discarded.
}
}
}
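// fetchResponseContent reads Claude's SSE stream line by line, converts each
// completion event into an OpenAI chat.completion.chunk payload, and sends the
// framed "data: ..." lines on the returned channel until the stream ends.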
func fetchResponseContent(ctx *gin.Context, responseBody *bufio.Reader) <-chan string {
contentCh := make(chan string)
go func() {
defer close(contentCh)
for {
line, err := responseBody.ReadString('\n')
if err != nil {
if err != io.EOF {
log.Println("error reading Claude stream:", err)
}
break
}
line = strings.TrimSpace(line)
// Only "data:" lines carry JSON payloads; skip blank keep-alives and "event:" lines.
if line == "" || !strings.HasPrefix(line, "data:") {
continue
}
line = strings.TrimSpace(strings.TrimPrefix(line, "data:"))
if line == "[DONE]" {
break
}
var data map[string]interface{}
if err := json.Unmarshal([]byte(line), &data); err != nil {
log.Println("error decoding Claude event:", err)
continue
}
// Forward incremental completion text as an OpenAI-style chat.completion.chunk
// (minimal chunk: ID, Created and Model are omitted).
if completion, ok := data["completion"].(string); ok && completion != "" {
chunk := openai.ChatCompletionStreamResponse{
Object: "chat.completion.chunk",
Choices: []openai.ChatCompletionStreamChoice{
{
Delta: openai.ChatCompletionStreamChoiceDelta{
Role: "assistant",
Content: completion,
},
},
},
}
b, err := json.Marshal(chunk)
if err != nil {
log.Println("error encoding stream chunk:", err)
continue
}
contentCh <- "data: " + string(b) + "\n\n"
}
// A non-empty stop_reason marks the end of the completion.
if stopReason, ok := data["stop_reason"].(string); ok && stopReason != "" {
break
}
}
}()
return contentCh
}
// claude -> openai
func TranslatestopReason(reason string) string {
switch reason {
case "stop_sequence":
return "stop"
case "max_tokens":
return "length"
default:
return reason
}
}

View File

@@ -16,6 +16,7 @@ import (
"net/http/httputil"
"net/url"
"opencatd-open/pkg/azureopenai"
"opencatd-open/pkg/claude"
"opencatd-open/store"
"os"
"path/filepath"
@@ -469,6 +470,10 @@ func HandleProy(c *gin.Context) {
localuser = store.IsExistAuthCache(auth[7:])
c.Set("localuser", auth[7:])
}
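// Native Anthropic /v1/complete requests go straight to the Claude proxy without translation.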
if c.Request.URL.Path == "/v1/complete" {
claude.ClaudeProxy(c)
return
}
if c.Request.URL.Path == "/v1/audio/transcriptions" {
WhisperProxy(c)
return
@@ -481,12 +486,16 @@ func HandleProy(c *gin.Context) {
}})
return
}
onekey := store.FromKeyCacheRandomItemKey()
if err := c.BindJSON(&chatreq); err != nil {
c.AbortWithError(http.StatusBadRequest, err)
return
}
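// Requests for claude-* models are translated from the OpenAI format to Anthropic's /v1/complete API.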
if strings.HasPrefix(chatreq.Model, "claude-") {
claude.Translate(c, &chatreq)
return
}
chatlog.Model = chatreq.Model
for _, m := range chatreq.Messages {
pre_prompt += m.Content + "\n"
@@ -498,6 +507,8 @@ func HandleProy(c *gin.Context) {
var body bytes.Buffer
json.NewEncoder(&body).Encode(chatreq)
onekey := store.FromKeyCacheRandomItemKey()
// create the API request
switch onekey.ApiType {
case "azure_openai":