azure openai

This commit is contained in:
Sakurasan
2023-05-28 01:18:45 +08:00
parent 9c04122680
commit 2890b817f9
6 changed files with 69 additions and 9 deletions

View File

@@ -14,7 +14,7 @@ OpenCat for Team的开源实现
| 任务 | 完成情况 |
| --- | --- |
|Azure OpenAI | ✅|
|[Azure OpenAI](./doc/azure.md) | ✅|
| ... | ... |

View File

@@ -1,4 +1,10 @@
# Azure OpenAI
# Azure OpenAI for team
需要获取 api-key和endpoint
需要获取 api-key 和 endpoint（形如 `https://[resource name].openai.azure.com/`）
![](./azure_key%26endpoint.png)
- opencat 使用方式
- key name以 azure.[resource name]的方式添加
- 密钥任取一个
- <img src="./azure_openai_for_team.png" alt="azure_openai_for_team" height="600">

Binary file not shown.

After

Width:  |  Height:  |  Size: 294 KiB

View File

@@ -0,0 +1,54 @@
/*
https://learn.microsoft.com/zh-cn/azure/cognitive-services/openai/chatgpt-quickstart
curl $AZURE_OPENAI_ENDPOINT/openai/deployments/gpt-35-turbo/chat/completions?api-version=2023-03-15-preview \
-H "Content-Type: application/json" \
-H "api-key: $AZURE_OPENAI_KEY" \
-d '{
"model": "gpt-3.5-turbo",
"messages": [{"role": "user", "content": "你好"}]
}'
*/
package azureopenai
import (
	"fmt"
	"os"
	"testing"
)
// TestModels exercises Models against a live Azure OpenAI resource and
// prints every deployed model it reports.
//
// The endpoint and key are read from the AZURE_OPENAI_ENDPOINT and
// AZURE_OPENAI_KEY environment variables rather than being hard-coded,
// so no credentials are committed to source control. The test is
// skipped when either variable is unset, keeping CI runs green without
// network access or secrets.
func TestModels(t *testing.T) {
	type args struct {
		endpoint string
		apikey   string
	}
	// Pull credentials from the environment; never commit live keys.
	endpoint := os.Getenv("AZURE_OPENAI_ENDPOINT")
	apikey := os.Getenv("AZURE_OPENAI_KEY")
	if endpoint == "" || apikey == "" {
		t.Skip("AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_KEY must be set to run this test")
	}
	tests := []struct {
		name string
		args args
	}{
		{
			name: "test",
			args: args{
				endpoint: endpoint,
				apikey:   apikey,
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := Models(tt.args.endpoint, tt.args.apikey)
			if err != nil {
				t.Errorf("Models() error = %v", err)
				return
			}
			// List every deployment the resource reports.
			for _, data := range got.Data {
				fmt.Println(data.Model, data.ID)
			}
		})
	}
}
// curl $AZURE_OPENAI_ENDPOINT/openai/deployments?api-version=2023-03-15-preview \
// -H "Content-Type: application/json" \
// -H "api-key: $AZURE_OPENAI_KEY"

View File

@@ -224,10 +224,10 @@ func HandleAddKey(c *gin.Context) {
return
}
k := &store.Key{
ApiType: "azure_openai",
Name: body.Name,
Key: body.Key,
EndPoint: keynames[1],
ApiType: "azure_openai",
Name: body.Name,
Key: body.Key,
ResourceNmae: keynames[1],
}
if err := store.CreateKey(k); err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": gin.H{
@@ -388,7 +388,7 @@ func HandleProy(c *gin.Context) {
// 创建 API 请求
switch onekey.ApiType {
case "azure_openai":
req, err = http.NewRequest(c.Request.Method, fmt.Sprintf("https://%s.openai.azure.com/openai/deployments/%s/chat/completions?api-version=2023-03-15-preview", onekey.EndPoint, modelmap(chatreq.Model)), &body)
req, err = http.NewRequest(c.Request.Method, fmt.Sprintf("https://%s.openai.azure.com/openai/deployments/%s/chat/completions?api-version=2023-03-15-preview", onekey.ResourceNmae, modelmap(chatreq.Model)), &body)
req.Header = c.Request.Header
req.Header.Set("api-key", onekey.Key)
case "openai":
@@ -449,7 +449,6 @@ func HandleProy(c *gin.Context) {
reader := bufio.NewReader(resp.Body)
if resp.StatusCode == 200 && localuser {
if isStream {
contentCh := fetchResponseContent(c, reader)
var buffer bytes.Buffer

View File

@@ -12,6 +12,7 @@ type Key struct {
UserId string `json:"-,omitempty"`
ApiType string `gorm:"column:api_type"`
EndPoint string `gorm:"column:endpoint"`
ResourceNmae string `gorm:"column:resource_name"`
DeploymentName string `gorm:"column:deployment_name"`
CreatedAt time.Time `json:"createdAt,omitempty"`
UpdatedAt time.Time `json:"updatedAt,omitempty"`