fetch model & add apitype
This commit is contained in:
@@ -1,10 +1,13 @@
|
||||
package controller
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"opencatd-open/internal/consts"
|
||||
"opencatd-open/internal/dto"
|
||||
"opencatd-open/internal/model"
|
||||
"opencatd-open/internal/utils"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
@@ -18,13 +21,22 @@ func (a Api) CreateApiKey(c *gin.Context) {
|
||||
dto.Fail(c, 403, "Permission denied")
|
||||
return
|
||||
}
|
||||
req := new(model.ApiKey)
|
||||
err := c.ShouldBind(&req)
|
||||
newkey := new(model.ApiKey)
|
||||
err := c.ShouldBind(newkey)
|
||||
if err != nil {
|
||||
dto.Fail(c, 400, err.Error())
|
||||
}
|
||||
if slice.Contain([]string{"openai", "azure", "claude"}, *newkey.ApiType) {
|
||||
sma, err := utils.FetchKeyModel(a.db, newkey)
|
||||
if err == nil {
|
||||
newkey.SupportModelsArray = sma
|
||||
var buf = new(bytes.Buffer)
|
||||
json.NewEncoder(buf).Encode(sma) //nolint:errcheck
|
||||
newkey.SupportModels = utils.ToPtr(buf.String())
|
||||
}
|
||||
}
|
||||
|
||||
err = a.keyService.CreateApiKey(c, req)
|
||||
err = a.keyService.CreateApiKey(c, newkey)
|
||||
if err != nil {
|
||||
dto.Fail(c, 400, err.Error())
|
||||
} else {
|
||||
@@ -79,6 +91,10 @@ func (a Api) ListApiKey(c *gin.Context) {
|
||||
}
|
||||
str = str[:slen]
|
||||
key.ApiKey = &str
|
||||
|
||||
var sma []string
|
||||
json.NewDecoder(strings.NewReader(*key.SupportModels)).Decode(&sma) //nolint:errcheck
|
||||
key.SupportModelsArray = sma
|
||||
}
|
||||
dto.Success(c, gin.H{
|
||||
"total": total,
|
||||
|
||||
@@ -186,7 +186,7 @@ func (p *Proxy) SelectApiKey(model string) error {
|
||||
akpikeys, err := p.apiKeyDao.FindApiKeysBySupportModel(p.db, model)
|
||||
|
||||
if err != nil || len(akpikeys) == 0 {
|
||||
if strings.HasPrefix(model, "gpt") || strings.HasPrefix(model, "o1") || strings.HasPrefix(model, "o3") {
|
||||
if strings.HasPrefix(model, "gpt") || strings.HasPrefix(model, "o1") || strings.HasPrefix(model, "o3") || strings.HasPrefix(model, "o4") {
|
||||
keys, err := p.apiKeyDao.FindKeys(map[string]any{"apitype = ?": "openai"})
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -227,7 +227,7 @@ func (p *Proxy) SelectApiKey(model string) error {
|
||||
|
||||
func (p *Proxy) updateSupportModel() {
|
||||
|
||||
keys, err := p.apiKeyDao.FindKeys(map[string]interface{}{"apitype in ?": "openai,azure,claude"})
|
||||
keys, err := p.apiKeyDao.FindKeys(map[string]interface{}{"apitype in ?": []string{"openai", "azure", "claude"}})
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
100
internal/utils/fetchKeyModel.go
Normal file
100
internal/utils/fetchKeyModel.go
Normal file
@@ -0,0 +1,100 @@
|
||||
package utils
|
||||
|
||||
import (
	"fmt"
	"io"
	"net/http"
	"net/url"
	"os"
	"strings"
	"time"

	"github.com/tidwall/gjson"
	"gorm.io/gorm"

	"opencatd-open/internal/model"
)
|
||||
|
||||
// client is the shared HTTP client used for all model-listing requests.
// A timeout guards against an upstream API hanging indefinitely; init may
// additionally install a proxy transport from the LOCAL_PROXY env var.
var client = &http.Client{Timeout: 30 * time.Second}
|
||||
|
||||
func init() {
|
||||
if os.Getenv("LOCAL_PROXY") != "" {
|
||||
if proxyUrl, err := url.Parse(os.Getenv("LOCAL_PROXY")); err == nil {
|
||||
client.Transport = &http.Transport{Proxy: http.ProxyURL(proxyUrl)}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func FetchKeyModel(db *gorm.DB, key *model.ApiKey) ([]string, error) {
|
||||
|
||||
var supportModels []string
|
||||
var err error
|
||||
if *key.ApiType == "openai" || *key.ApiType == "azure" {
|
||||
supportModels, err = FetchOpenAISupportModels(db, key)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
}
|
||||
}
|
||||
if *key.ApiType == "claude" {
|
||||
supportModels, err = FetchClaudeSupportModels(db, key)
|
||||
}
|
||||
return supportModels, err
|
||||
}
|
||||
|
||||
func FetchOpenAISupportModels(db *gorm.DB, apikey *model.ApiKey) ([]string, error) {
|
||||
openaiModelsUrl := "https://api.openai.com/v1/models"
|
||||
// https://learn.microsoft.com/zh-cn/rest/api/azureopenai/models/list?view=rest-azureopenai-2025-02-01-preview&tabs=HTTP
|
||||
azureModelsUrl := "/openai/deployments?api-version=2022-12-01"
|
||||
|
||||
var supportModels []string
|
||||
var req *http.Request
|
||||
if *apikey.ApiType == "azure" {
|
||||
req, _ = http.NewRequest("GET", *apikey.Endpoint+azureModelsUrl, nil)
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("api-key", *apikey.ApiKey)
|
||||
} else {
|
||||
req, _ = http.NewRequest("GET", openaiModelsUrl, nil)
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("Authorization", "Bearer "+*apikey.ApiKey)
|
||||
}
|
||||
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
if resp.StatusCode == http.StatusOK {
|
||||
bytesbody, _ := io.ReadAll(resp.Body)
|
||||
result := gjson.GetBytes(bytesbody, "data.#.id").Array()
|
||||
for _, v := range result {
|
||||
model := v.Str
|
||||
model = strings.Replace(model, "-35-", "-3.5-", -1)
|
||||
model = strings.Replace(model, "-41-", "-4.1-", -1)
|
||||
supportModels = append(supportModels, model)
|
||||
}
|
||||
}
|
||||
return supportModels, nil
|
||||
}
|
||||
|
||||
func FetchClaudeSupportModels(db *gorm.DB, apikey *model.ApiKey) ([]string, error) {
|
||||
// https://docs.anthropic.com/en/api/models-list
|
||||
claudemodelsUrl := "https://api.anthropic.com/v1/models"
|
||||
var supportModels []string
|
||||
|
||||
req, _ := http.NewRequest("GET", claudemodelsUrl, nil)
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("x-api-key", *apikey.ApiKey)
|
||||
req.Header.Set("anthropic-version", "2023-06-01")
|
||||
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
if resp.StatusCode == http.StatusOK {
|
||||
bytesbody, _ := io.ReadAll(resp.Body)
|
||||
result := gjson.GetBytes(bytesbody, "data.#.id").Array()
|
||||
for _, v := range result {
|
||||
supportModels = append(supportModels, v.Str)
|
||||
}
|
||||
}
|
||||
return supportModels, nil
|
||||
}
|
||||
Reference in New Issue
Block a user