3	.github/workflows/ci.yaml	vendored
@@ -7,9 +7,6 @@ on:
      - dev
    tags:
      - 'v*'
  pull_request:
    branches: [ "main" ]

# Project jobs; jobs can be scheduled in parallel
jobs:
@@ -1,10 +1,14 @@
# opencatd-open

<a title="Docker Image CI" target="_blank" href="https://github.com/mirrors2/opencatd-open/actions"><img alt="GitHub Workflow Status" src="https://img.shields.io/github/actions/workflow/status/mirrors2/opencatd-open/ci.yaml?label=Actions&logo=github&style=flat-square"></a>
<a title="Docker Pulls" target="_blank" href="https://hub.docker.com/r/mirrors2/opencatd-open"><img src="https://img.shields.io/docker/pulls/mirrors2/opencatd-open.svg?logo=docker&label=docker&style=flat-square"></a>

opencatd-open is an open-source, team-shared service for the ChatGPT API that can be safely shared with others.

---
An open-source implementation of OpenCat for Team

- Implements essentially all of opencatd's features
+ Implements ~~essentially~~ all of opencatd's features

## Quick Start
```
4	doc/azure.md	Normal file
@@ -0,0 +1,4 @@
# Azure OpenAI

You need to obtain the api-key and endpoint.
[screenshot: azure_key&endpoint.png]
BIN	doc/azure_key&endpoint.png	Normal file (42 KiB)
Binary file not shown.
58	pkg/azureopenai/azureopenai.go	Normal file
@@ -0,0 +1,58 @@
/*
https://learn.microsoft.com/zh-cn/azure/cognitive-services/openai/chatgpt-quickstart

curl $AZURE_OPENAI_ENDPOINT/openai/deployments/gpt-35-turbo/chat/completions?api-version=2023-03-15-preview \
  -H "Content-Type: application/json" \
  -H "api-key: $AZURE_OPENAI_KEY" \
  -d '{
  "model": "gpt-3.5-turbo",
  "messages": [{"role": "user", "content": "你好"}]
}'
*/

package azureopenai

import (
	"encoding/json"
	"net/http"
)

var (
	ENDPOINT        string
	API_KEY         string
	DEPLOYMENT_NAME string
)

type ModelsList struct {
	Data []struct {
		ScaleSettings struct {
			ScaleType string `json:"scale_type"`
		} `json:"scale_settings"`
		Model     string `json:"model"`
		Owner     string `json:"owner"`
		ID        string `json:"id"`
		Status    string `json:"status"`
		CreatedAt int    `json:"created_at"`
		UpdatedAt int    `json:"updated_at"`
		Object    string `json:"object"`
	} `json:"data"`
	Object string `json:"object"`
}

func Models(endpoint, apikey string) (*ModelsList, error) {
	var modelsl ModelsList
	req, _ := http.NewRequest(http.MethodGet, endpoint+"/openai/deployments?api-version=2022-12-01", nil)
	req.Header.Set("api-key", apikey)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	err = json.NewDecoder(resp.Body).Decode(&modelsl)
	if err != nil {
		return nil, err
	}
	return &modelsl, nil
}
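For context, a minimal sketch of how this new helper might be called to list the deployments on an Azure resource; the module import path and the endpoint/key values are assumptions, not part of this change:

```go
package main

import (
	"fmt"
	"log"

	"github.com/mirrors2/opencatd-open/pkg/azureopenai" // assumed module path
)

func main() {
	// Placeholder values; substitute your own Azure OpenAI resource endpoint and api-key.
	endpoint := "https://my-resource.openai.azure.com"
	apikey := "<api-key>"

	models, err := azureopenai.Models(endpoint, apikey)
	if err != nil {
		log.Fatal(err)
	}
	// Each entry is a deployment; its ID is the deployment name used in the
	// /openai/deployments/<name>/chat/completions request path.
	for _, d := range models.Data {
		fmt.Println(d.ID, d.Model, d.Status)
	}
}
```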
@@ -413,7 +413,7 @@ func HandleProy(c *gin.Context) {
	if resp.StatusCode == 200 && localuser {

		if isStream {
-			contentCh := fetchResponseContent(writer, reader)
+			contentCh := fetchResponseContent(c, reader)
			var buffer bytes.Buffer
			for content := range contentCh {
				buffer.WriteString(content)
@@ -536,15 +536,18 @@ func HandleUsage(c *gin.Context) {
	c.JSON(200, usage)
}

-func fetchResponseContent(w *bufio.Writer, responseBody *bufio.Reader) <-chan string {
+func fetchResponseContent(ctx *gin.Context, responseBody *bufio.Reader) <-chan string {
	contentCh := make(chan string)
	go func() {
		defer close(contentCh)
		for {
			line, err := responseBody.ReadString('\n')
			if err == nil {
-				w.WriteString(line)
-				w.Flush()
+				lines := strings.Split(line, "")
+				for _, word := range lines {
+					ctx.Writer.WriteString(word)
+					ctx.Writer.Flush()
+				}
				if line == "\n" {
					continue
				}
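The reworked fetchResponseContent forwards the upstream stream to the client as it arrives while still handing the content back on a channel for usage accounting. A self-contained sketch of that read-and-forward pattern, using plain io types instead of gin; all names here are illustrative, not the repository's exact implementation:

```go
package main

import (
	"bufio"
	"fmt"
	"io"
	"strings"
)

// streamLines reads an SSE-style body line by line, forwards each line to w
// as it arrives, and sends the raw lines on the returned channel so the
// caller can accumulate them (e.g. for usage accounting).
func streamLines(w io.Writer, body *bufio.Reader) <-chan string {
	ch := make(chan string)
	go func() {
		defer close(ch)
		for {
			line, err := body.ReadString('\n')
			if line != "" {
				fmt.Fprint(w, line) // forward to the client immediately
				ch <- line
			}
			if err != nil { // io.EOF or a real error ends the stream
				return
			}
		}
	}()
	return ch
}

func main() {
	upstream := bufio.NewReader(strings.NewReader("data: hello\n\ndata: [DONE]\n"))
	var sb strings.Builder
	for line := range streamLines(io.Discard, upstream) {
		sb.WriteString(line)
	}
	fmt.Print(sb.String())
}
```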
@@ -3,12 +3,15 @@ package store

import "time"

type Key struct {
-	ID        uint      `gorm:"primarykey" json:"id,omitempty"`
-	Key       string    `gorm:"unique;not null" json:"key,omitempty"`
-	Name      string    `gorm:"unique;not null" json:"name,omitempty"`
-	UserId    string    `json:"-,omitempty"`
-	CreatedAt time.Time `json:"createdAt,omitempty"`
-	UpdatedAt time.Time `json:"updatedAt,omitempty"`
+	ID             uint      `gorm:"primarykey" json:"id,omitempty"`
+	Key            string    `gorm:"unique;not null" json:"key,omitempty"`
+	Name           string    `gorm:"unique;not null" json:"name,omitempty"`
+	UserId         string    `json:"-,omitempty"`
+	KeyType        string
+	EndPoint       string
+	DeploymentName string
+	CreatedAt      time.Time `json:"createdAt,omitempty"`
+	UpdatedAt      time.Time `json:"updatedAt,omitempty"`
}

func GetKeyrByName(name string) (*Key, error) {
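With the three new columns, a stored key can describe an Azure deployment as well as a plain OpenAI key. A hypothetical sketch of filling them in with gorm; the helper name and the "azure" KeyType value are assumptions, not code from this change:

```go
package store

import "gorm.io/gorm"

// AddAzureKey is a hypothetical helper showing how the new columns could be
// populated for an Azure OpenAI key; the real constructor is not part of this diff.
func AddAzureKey(db *gorm.DB, name, apiKey, endpoint, deployment string) error {
	k := Key{
		Key:            apiKey,
		Name:           name,
		KeyType:        "azure",    // assumed discriminator value
		EndPoint:       endpoint,   // e.g. https://<resource>.openai.azure.com
		DeploymentName: deployment, // Azure deployment to route requests to
	}
	return db.Create(&k).Error
}
```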