fetch model & add apitype
frontend/src/assets/github.svg (new file, 1 line, 913 B)
@@ -0,0 +1 @@
<svg fill="currentColor" fill-rule="evenodd" height="56" viewBox="0 0 24 24" width="56" xmlns="http://www.w3.org/2000/svg" style="flex: 0 0 auto; line-height: 1;"><title>Github</title><path d="M12 0c6.63 0 12 5.276 12 11.79-.001 5.067-3.29 9.567-8.175 11.187-.6.118-.825-.25-.825-.56 0-.398.015-1.665.015-3.242 0-1.105-.375-1.813-.81-2.181 2.67-.295 5.475-1.297 5.475-5.822 0-1.297-.465-2.344-1.23-3.169.12-.295.54-1.503-.12-3.125 0 0-1.005-.324-3.3 1.209a11.32 11.32 0 00-3-.398c-1.02 0-2.04.133-3 .398-2.295-1.518-3.3-1.209-3.3-1.209-.66 1.622-.24 2.83-.12 3.125-.765.825-1.23 1.887-1.23 3.169 0 4.51 2.79 5.527 5.46 5.822-.345.294-.66.81-.765 1.577-.69.31-2.415.81-3.495-.973-.225-.354-.9-1.223-1.845-1.209-1.005.015-.405.56.015.781.51.28 1.095 1.327 1.23 1.666.24.663 1.02 1.93 4.035 1.385 0 .988.015 1.916.015 2.196 0 .31-.225.664-.825.56C3.303 21.374-.003 16.867 0 11.791 0 5.276 5.37 0 12 0z"></path></svg>
@@ -67,6 +67,12 @@
 <template v-else-if="newApiKey.type === 'gemini'">
 <img src="../../assets/gemini.svg" class="w-5 h-5" alt="">
 </template>
+<template v-else-if="newApiKey.type === 'azure'">
+<img src="../../assets/azure.svg" class="w-5 h-5" alt="">
+</template>
+<template v-else-if="newApiKey.type === 'github'">
+<img src="../../assets/github.svg" class="w-5 h-5" alt="">
+</template>
 <template v-else="newApiKey.type">
 <img src="../../assets/logo.svg" class="w-5 h-5" alt="">
 </template>
@@ -1,10 +1,13 @@
 package controller

 import (
+	"bytes"
+	"encoding/json"
 	"net/http"
 	"opencatd-open/internal/consts"
 	"opencatd-open/internal/dto"
 	"opencatd-open/internal/model"
+	"opencatd-open/internal/utils"
 	"strconv"
 	"strings"

@@ -18,13 +21,22 @@ func (a Api) CreateApiKey(c *gin.Context) {
 		dto.Fail(c, 403, "Permission denied")
 		return
 	}
-	req := new(model.ApiKey)
-	err := c.ShouldBind(&req)
+	newkey := new(model.ApiKey)
+	err := c.ShouldBind(newkey)
 	if err != nil {
 		dto.Fail(c, 400, err.Error())
 	}
+	if slice.Contain([]string{"openai", "azure", "claude"}, *newkey.ApiType) {
+		sma, err := utils.FetchKeyModel(a.db, newkey)
+		if err == nil {
+			newkey.SupportModelsArray = sma
+			var buf = new(bytes.Buffer)
+			json.NewEncoder(buf).Encode(sma) //nolint:errcheck
+			newkey.SupportModels = utils.ToPtr(buf.String())
+		}
+	}

-	err = a.keyService.CreateApiKey(c, req)
+	err = a.keyService.CreateApiKey(c, newkey)
 	if err != nil {
 		dto.Fail(c, 400, err.Error())
 	} else {
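For reference, a minimal standalone sketch of the serialization step this hunk adds, using only the standard library; the local apiKey struct and toPtr helper stand in for the project's model.ApiKey and utils.ToPtr.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// apiKey mirrors only the two fields touched here; the real model.ApiKey has more.
type apiKey struct {
	SupportModels      *string
	SupportModelsArray []string
}

// toPtr stands in for utils.ToPtr.
func toPtr[T any](v T) *T { return &v }

func main() {
	sma := []string{"gpt-4o", "o4-mini"} // e.g. models returned by utils.FetchKeyModel

	var key apiKey
	key.SupportModelsArray = sma

	// Encode the slice to JSON and keep it as a string column, as CreateApiKey now does.
	buf := new(bytes.Buffer)
	json.NewEncoder(buf).Encode(sma) //nolint:errcheck
	key.SupportModels = toPtr(buf.String())

	fmt.Print(*key.SupportModels) // ["gpt-4o","o4-mini"]
}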
@@ -79,6 +91,10 @@ func (a Api) ListApiKey(c *gin.Context) {
 		}
 		str = str[:slen]
 		key.ApiKey = &str
+
+		var sma []string
+		json.NewDecoder(strings.NewReader(*key.SupportModels)).Decode(&sma) //nolint:errcheck
+		key.SupportModelsArray = sma
 	}
 	dto.Success(c, gin.H{
 		"total": total,
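The ListApiKey hunk above performs the reverse decode. A sketch of that step in isolation, with a nil/empty guard added purely for the sketch (the committed code dereferences key.SupportModels directly):

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// decodeSupportModels mirrors the decode in ListApiKey; the guard is an
// addition for this sketch only.
func decodeSupportModels(supportModels *string) []string {
	if supportModels == nil || *supportModels == "" {
		return nil
	}
	var sma []string
	json.NewDecoder(strings.NewReader(*supportModels)).Decode(&sma) //nolint:errcheck
	return sma
}

func main() {
	s := `["gpt-4o","o4-mini"]`
	fmt.Println(decodeSupportModels(&s)) // [gpt-4o o4-mini]
}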
@@ -186,7 +186,7 @@ func (p *Proxy) SelectApiKey(model string) error {
 	akpikeys, err := p.apiKeyDao.FindApiKeysBySupportModel(p.db, model)

 	if err != nil || len(akpikeys) == 0 {
-		if strings.HasPrefix(model, "gpt") || strings.HasPrefix(model, "o1") || strings.HasPrefix(model, "o3") {
+		if strings.HasPrefix(model, "gpt") || strings.HasPrefix(model, "o1") || strings.HasPrefix(model, "o3") || strings.HasPrefix(model, "o4") {
 			keys, err := p.apiKeyDao.FindKeys(map[string]any{"apitype = ?": "openai"})
 			if err != nil {
 				return err
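The fallback branch routes by model-name prefix; after this change the prefixes are gpt, o1, o3 and o4. The same check written as a loop, as an illustrative sketch rather than the project's code:

package main

import (
	"fmt"
	"strings"
)

// openAIFamily reports whether a model name falls under the prefixes the
// fallback branch now treats as OpenAI models.
func openAIFamily(model string) bool {
	for _, prefix := range []string{"gpt", "o1", "o3", "o4"} {
		if strings.HasPrefix(model, prefix) {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(openAIFamily("o4-mini"))           // true
	fmt.Println(openAIFamily("claude-3-7-sonnet")) // false
}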
@@ -227,7 +227,7 @@ func (p *Proxy) SelectApiKey(model string) error {

 func (p *Proxy) updateSupportModel() {

-	keys, err := p.apiKeyDao.FindKeys(map[string]interface{}{"apitype in ?": "openai,azure,claude"})
+	keys, err := p.apiKeyDao.FindKeys(map[string]interface{}{"apitype in ?": []string{"openai", "azure", "claude"}})
 	if err != nil {
 		return
 	}
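The fix matters because GORM only expands the "apitype in ?" placeholder into IN ('openai','azure','claude') when given a slice; the old comma-joined string was bound as one literal value. A sketch of the underlying query, with a hypothetical package and function name (the real call goes through apiKeyDao.FindKeys):

package dao // hypothetical placement for the sketch

import (
	"opencatd-open/internal/model"

	"gorm.io/gorm"
)

// keysWithFetchableType loads the keys whose apitype supports model discovery.
func keysWithFetchableType(db *gorm.DB) ([]model.ApiKey, error) {
	var keys []model.ApiKey
	// A slice argument lets GORM expand the placeholder into
	// IN ('openai','azure','claude'); a single "openai,azure,claude" string
	// would be bound as one literal and match nothing.
	err := db.Where("apitype IN ?", []string{"openai", "azure", "claude"}).Find(&keys).Error
	return keys, err
}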
internal/utils/fetchKeyModel.go (new file, 100 lines)
@@ -0,0 +1,100 @@
package utils

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
	"opencatd-open/internal/model"
	"os"
	"strings"

	"github.com/tidwall/gjson"
	"gorm.io/gorm"
)

var client = &http.Client{}

func init() {
	if os.Getenv("LOCAL_PROXY") != "" {
		if proxyUrl, err := url.Parse(os.Getenv("LOCAL_PROXY")); err == nil {
			client.Transport = &http.Transport{Proxy: http.ProxyURL(proxyUrl)}
		}
	}
}

func FetchKeyModel(db *gorm.DB, key *model.ApiKey) ([]string, error) {

	var supportModels []string
	var err error
	if *key.ApiType == "openai" || *key.ApiType == "azure" {
		supportModels, err = FetchOpenAISupportModels(db, key)
		if err != nil {
			fmt.Println(err)
		}
	}
	if *key.ApiType == "claude" {
		supportModels, err = FetchClaudeSupportModels(db, key)
	}
	return supportModels, err
}

func FetchOpenAISupportModels(db *gorm.DB, apikey *model.ApiKey) ([]string, error) {
	openaiModelsUrl := "https://api.openai.com/v1/models"
	// https://learn.microsoft.com/zh-cn/rest/api/azureopenai/models/list?view=rest-azureopenai-2025-02-01-preview&tabs=HTTP
	azureModelsUrl := "/openai/deployments?api-version=2022-12-01"

	var supportModels []string
	var req *http.Request
	if *apikey.ApiType == "azure" {
		req, _ = http.NewRequest("GET", *apikey.Endpoint+azureModelsUrl, nil)
		req.Header.Set("Content-Type", "application/json")
		req.Header.Set("api-key", *apikey.ApiKey)
	} else {
		req, _ = http.NewRequest("GET", openaiModelsUrl, nil)
		req.Header.Set("Content-Type", "application/json")
		req.Header.Set("Authorization", "Bearer "+*apikey.ApiKey)
	}

	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode == http.StatusOK {
		bytesbody, _ := io.ReadAll(resp.Body)
		result := gjson.GetBytes(bytesbody, "data.#.id").Array()
		for _, v := range result {
			model := v.Str
			model = strings.Replace(model, "-35-", "-3.5-", -1)
			model = strings.Replace(model, "-41-", "-4.1-", -1)
			supportModels = append(supportModels, model)
		}
	}
	return supportModels, nil
}

func FetchClaudeSupportModels(db *gorm.DB, apikey *model.ApiKey) ([]string, error) {
	// https://docs.anthropic.com/en/api/models-list
	claudemodelsUrl := "https://api.anthropic.com/v1/models"
	var supportModels []string

	req, _ := http.NewRequest("GET", claudemodelsUrl, nil)
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("x-api-key", *apikey.ApiKey)
	req.Header.Set("anthropic-version", "2023-06-01")

	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode == http.StatusOK {
		bytesbody, _ := io.ReadAll(resp.Body)
		result := gjson.GetBytes(bytesbody, "data.#.id").Array()
		for _, v := range result {
			supportModels = append(supportModels, v.Str)
		}
	}
	return supportModels, nil
}
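Both fetchers lean on gjson's "data.#.id" path to collect every id in the response's data array. A standalone sketch of that extraction over an invented payload:

package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	// Invented payload with the shape of the /v1/models responses parsed above.
	body := []byte(`{"data":[{"id":"gpt-4o"},{"id":"o4-mini"},{"id":"claude-3-7-sonnet-latest"}]}`)

	// "data.#.id" walks the data array and collects each element's id.
	var models []string
	for _, v := range gjson.GetBytes(body, "data.#.id").Array() {
		models = append(models, v.Str)
	}
	fmt.Println(models) // [gpt-4o o4-mini claude-3-7-sonnet-latest]
}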