fetch model & add apitype
internal/utils/fetchKeyModel.go (new file, 100 lines added)
@@ -0,0 +1,100 @@
package utils

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
	"opencatd-open/internal/model"
	"os"
	"strings"

	"github.com/tidwall/gjson"
	"gorm.io/gorm"
)

// client is shared by all model-list requests; init optionally routes it
// through the proxy named in the LOCAL_PROXY environment variable.
var client = &http.Client{}

func init() {
	if os.Getenv("LOCAL_PROXY") != "" {
		if proxyUrl, err := url.Parse(os.Getenv("LOCAL_PROXY")); err == nil {
			client.Transport = &http.Transport{Proxy: http.ProxyURL(proxyUrl)}
		}
	}
}

// FetchKeyModel returns the model IDs supported by the upstream provider
// that the given API key belongs to, dispatching on the key's ApiType.
func FetchKeyModel(db *gorm.DB, key *model.ApiKey) ([]string, error) {
	var supportModels []string
	var err error

	switch *key.ApiType {
	case "openai", "azure":
		supportModels, err = FetchOpenAISupportModels(db, key)
		if err != nil {
			fmt.Println(err)
		}
	case "claude":
		supportModels, err = FetchClaudeSupportModels(db, key)
	}
	return supportModels, err
}

// FetchOpenAISupportModels lists the models available to an OpenAI or Azure
// OpenAI key by calling the provider's model/deployment listing endpoint.
func FetchOpenAISupportModels(db *gorm.DB, apikey *model.ApiKey) ([]string, error) {
	openaiModelsUrl := "https://api.openai.com/v1/models"
	// https://learn.microsoft.com/zh-cn/rest/api/azureopenai/models/list?view=rest-azureopenai-2025-02-01-preview&tabs=HTTP
	azureModelsUrl := "/openai/deployments?api-version=2022-12-01"

	var supportModels []string
	var req *http.Request
	var err error
	if *apikey.ApiType == "azure" {
		req, err = http.NewRequest("GET", *apikey.Endpoint+azureModelsUrl, nil)
		if err != nil {
			return nil, err
		}
		req.Header.Set("Content-Type", "application/json")
		req.Header.Set("api-key", *apikey.ApiKey)
	} else {
		req, err = http.NewRequest("GET", openaiModelsUrl, nil)
		if err != nil {
			return nil, err
		}
		req.Header.Set("Content-Type", "application/json")
		req.Header.Set("Authorization", "Bearer "+*apikey.ApiKey)
	}

	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode == http.StatusOK {
		bytesbody, _ := io.ReadAll(resp.Body)
		result := gjson.GetBytes(bytesbody, "data.#.id").Array()
		for _, v := range result {
			// Normalize Azure-style IDs (e.g. gpt-35-turbo) to the OpenAI
			// form (gpt-3.5-turbo) so both providers report the same names.
			id := v.Str
			id = strings.ReplaceAll(id, "-35-", "-3.5-")
			id = strings.ReplaceAll(id, "-41-", "-4.1-")
			supportModels = append(supportModels, id)
		}
	}
	return supportModels, nil
}

// FetchClaudeSupportModels lists the models available to an Anthropic key.
func FetchClaudeSupportModels(db *gorm.DB, apikey *model.ApiKey) ([]string, error) {
	// https://docs.anthropic.com/en/api/models-list
	claudemodelsUrl := "https://api.anthropic.com/v1/models"
	var supportModels []string

	req, err := http.NewRequest("GET", claudemodelsUrl, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("x-api-key", *apikey.ApiKey)
	req.Header.Set("anthropic-version", "2023-06-01")

	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode == http.StatusOK {
		bytesbody, _ := io.ReadAll(resp.Body)
		result := gjson.GetBytes(bytesbody, "data.#.id").Array()
		for _, v := range result {
			supportModels = append(supportModels, v.Str)
		}
	}
	return supportModels, nil
}
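For reference, a minimal sketch of how the new helper might be called. The `model.ApiKey` field names and their pointer types are taken from the dereferences in the diff above; the `opencatd-open/internal/utils` import path, the placeholder key value, and the surrounding wiring are assumptions, not part of this commit.

package main

import (
	"fmt"
	"log"

	"opencatd-open/internal/model"
	"opencatd-open/internal/utils" // assumed import path for the package added above
)

func main() {
	// ApiType and ApiKey are *string in the code above; build a throwaway key.
	apiType, apiKey := "openai", "sk-..." // placeholder key, not a real credential
	key := &model.ApiKey{ApiType: &apiType, ApiKey: &apiKey}

	// The db argument is not used by the fetch helpers in this commit,
	// so nil is passed here purely for illustration.
	models, err := utils.FetchKeyModel(nil, key)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("supported models:", models)
}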