diff --git a/README.md b/README.md
index 62ff7c3..dd89b79 100644
--- a/README.md
+++ b/README.md
@@ -14,7 +14,7 @@ OpenCat for Team的开源实现
 
 | 任务 | 完成情况 |
 | --- | --- |
-|Azure OpenAI | ✅|
+|[Azure OpenAI](./doc/azure.md) | ✅|
 | ... | ... |
 
 
diff --git a/doc/azure.md b/doc/azure.md
index c190ddb..d082d49 100644
--- a/doc/azure.md
+++ b/doc/azure.md
@@ -1,4 +1,10 @@
-# Azure OpenAI
+# Azure OpenAI for Team
 
-需要获取 api-key和endpoint
+You need to obtain an api-key and the endpoint (https://[resource name].openai.azure.com/)
 ![](./azure_key%26endpoint.png)
+
+
+- Using it in opencat
+  - add the key with a name of the form azure.[resource name]
+  - either of the two keys works
+  - ![](./azure_openai_for_team.png)
\ No newline at end of file
diff --git a/doc/azure_openai_for_team.png b/doc/azure_openai_for_team.png
new file mode 100644
index 0000000..4b7df14
Binary files /dev/null and b/doc/azure_openai_for_team.png differ
diff --git a/pkg/azureopenai/azureopenai_test.go b/pkg/azureopenai/azureopenai_test.go
new file mode 100644
index 0000000..1fe8e7d
--- /dev/null
+++ b/pkg/azureopenai/azureopenai_test.go
@@ -0,0 +1,54 @@
+/*
+https://learn.microsoft.com/zh-cn/azure/cognitive-services/openai/chatgpt-quickstart
+
+curl $AZURE_OPENAI_ENDPOINT/openai/deployments/gpt-35-turbo/chat/completions?api-version=2023-03-15-preview \
+  -H "Content-Type: application/json" \
+  -H "api-key: $AZURE_OPENAI_KEY" \
+  -d '{
+  "model": "gpt-3.5-turbo",
+  "messages": [{"role": "user", "content": "你好"}]
+}'
+
+*/
+
+package azureopenai
+
+import (
+	"fmt"
+	"testing"
+)
+
+func TestModels(t *testing.T) {
+	type args struct {
+		endpoint string
+		apikey   string
+	}
+	tests := []struct {
+		name string
+		args args
+	}{
+		{
+			name: "test",
+			args: args{
+				endpoint: "https://mirrors2.openai.azure.com",
+				apikey:   "696a7729234c438cb38f24da22ee602d",
+			},
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := Models(tt.args.endpoint, tt.args.apikey)
+			if err != nil {
+				t.Errorf("Models() error = %v", err)
+				return
+			}
+			for _, data := range got.Data {
+				fmt.Println(data.Model, data.ID)
+			}
+		})
+	}
+}
+
+// curl https://mirrors2.openai.azure.com/openai/deployments?api-version=2023-03-15-preview \
+//   -H "Content-Type: application/json" \
+//   -H "api-key: 696a7729234c438cb38f24da22ee602d"
diff --git a/router/router.go b/router/router.go
index c207fda..c0a1972 100644
--- a/router/router.go
+++ b/router/router.go
@@ -224,10 +224,10 @@ func HandleAddKey(c *gin.Context) {
 			return
 		}
 		k := &store.Key{
-			ApiType:  "azure_openai",
-			Name:     body.Name,
-			Key:      body.Key,
-			EndPoint: keynames[1],
+			ApiType:      "azure_openai",
+			Name:         body.Name,
+			Key:          body.Key,
+			ResourceName: keynames[1],
 		}
 		if err := store.CreateKey(k); err != nil {
 			c.JSON(http.StatusInternalServerError, gin.H{"error": gin.H{
@@ -388,7 +388,7 @@ func HandleProy(c *gin.Context) {
 	// 创建 API 请求
 	switch onekey.ApiType {
 	case "azure_openai":
-		req, err = http.NewRequest(c.Request.Method, fmt.Sprintf("https://%s.openai.azure.com/openai/deployments/%s/chat/completions?api-version=2023-03-15-preview", onekey.EndPoint, modelmap(chatreq.Model)), &body)
+		req, err = http.NewRequest(c.Request.Method, fmt.Sprintf("https://%s.openai.azure.com/openai/deployments/%s/chat/completions?api-version=2023-03-15-preview", onekey.ResourceName, modelmap(chatreq.Model)), &body)
 		req.Header = c.Request.Header
 		req.Header.Set("api-key", onekey.Key)
 	case "openai":
@@ -449,7 +449,6 @@ func HandleProy(c *gin.Context) {
 	reader := bufio.NewReader(resp.Body)
 
 	if resp.StatusCode == 200 && localuser {
-
 		if isStream {
 			contentCh := fetchResponseContent(c, reader)
 			var buffer bytes.Buffer
diff --git a/store/keydb.go b/store/keydb.go
index fcab5a8..c5d106b 100644
--- a/store/keydb.go
+++ b/store/keydb.go
@@ -12,6 +12,7 @@ type Key struct {
 	UserId         string    `json:"-,omitempty"`
 	ApiType        string    `gorm:"column:api_type"`
 	EndPoint       string    `gorm:"column:endpoint"`
+	ResourceName   string    `gorm:"column:resource_name"`
 	DeploymentName string    `gorm:"column:deployment_name"`
 	CreatedAt      time.Time `json:"createdAt,omitempty"`
 	UpdatedAt      time.Time `json:"updatedAt,omitempty"`
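Reviewer note: the new test calls azureopenai.Models, but its implementation is not part of this patch. Below is a minimal sketch of what such a function might look like, based only on the deployments-list curl at the end of the test file; the Deployment and ModelsResponse names are assumptions, not code from this repository.

package azureopenai

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// Deployment mirrors one entry of the Azure OpenAI deployments list
// (fields used by the test: ID and Model).
type Deployment struct {
	ID    string `json:"id"`
	Model string `json:"model"`
}

// ModelsResponse is the top-level payload of GET /openai/deployments.
type ModelsResponse struct {
	Data []Deployment `json:"data"`
}

// Models lists the deployments available under the given resource endpoint,
// e.g. https://mirrors2.openai.azure.com, using the api-key header.
func Models(endpoint, apikey string) (*ModelsResponse, error) {
	url := fmt.Sprintf("%s/openai/deployments?api-version=2023-03-15-preview", endpoint)
	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("api-key", apikey)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("deployments request failed: %s", resp.Status)
	}

	var out ModelsResponse
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		return nil, err
	}
	return &out, nil
}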
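Reviewer note on the key-name convention: doc/azure.md says the key is added with a name of the form azure.[resource name], HandleAddKey stores keynames[1] as ResourceName, and HandleProy interpolates that value into the Azure host. The standalone sketch below illustrates the mapping; splitting the name on "." is an assumption, since the code that builds keynames is outside this diff.

package main

import (
	"fmt"
	"strings"
)

func main() {
	// A key named per doc/azure.md: azure.[resource name].
	keyName := "azure.mirrors2"

	// Assumed split on "."; keynames[1] is what HandleAddKey stores as ResourceName.
	parts := strings.Split(keyName, ".")
	resourceName := parts[1] // "mirrors2"

	// HandleProy then builds the Azure OpenAI chat-completions URL from it.
	url := fmt.Sprintf(
		"https://%s.openai.azure.com/openai/deployments/%s/chat/completions?api-version=2023-03-15-preview",
		resourceName, "gpt-35-turbo",
	)
	fmt.Println(url)
}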