opencatd-open/llm/claude/chat.go
// https://docs.anthropic.com/claude/reference/messages_post
package claude

import (
	"context"
	"encoding/json"

	"opencatd-open/internal/model"
	"opencatd-open/llm"
	"opencatd-open/llm/openai"

	"github.com/gin-gonic/gin"
)
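
// ChatProxy adapts an OpenAI-style chat completion request and hands it to the
// Anthropic Messages handler (ChatMessages).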
func ChatProxy(c *gin.Context, chatReq *openai.ChatCompletionRequest) {
	ChatMessages(c, chatReq)
}
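
// ChatTextCompletions is currently a no-op stub for the legacy text-completions endpoint.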
func ChatTextCompletions(c *gin.Context, chatReq *openai.ChatCompletionRequest) {
}
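
// ChatRequest mirrors the request body of the Anthropic Messages API
// (see the reference URL at the top of this file).
//
// Example (a minimal sketch; the model name is only illustrative):
//
//	req := ChatRequest{
//		Model:     "claude-3-opus-20240229",
//		System:    "You are a helpful assistant.",
//		Messages:  []ChatMessage{{Role: "user", Content: "Hello"}},
//		MaxTokens: 1024,
//	}
//	body := req.ByteJson() // JSON payload for POST /v1/messages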
type ChatRequest struct {
	Model            string  `json:"model,omitempty"`
	Messages         any     `json:"messages,omitempty"`
	MaxTokens        int     `json:"max_tokens,omitempty"`
	Stream           bool    `json:"stream,omitempty"`
	System           string  `json:"system,omitempty"`
	TopK             int     `json:"top_k,omitempty"`
	TopP             float64 `json:"top_p,omitempty"`
	Temperature      float64 `json:"temperature,omitempty"`
	AnthropicVersion string  `json:"anthropic_version,omitempty"`
}
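
// ByteJson serializes the request to JSON; the marshal error is discarded.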
func (c *ChatRequest) ByteJson() []byte {
	bytejson, _ := json.Marshal(c)
	return bytejson
}
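
// ChatMessage is a plain-text entry in the messages array.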
type ChatMessage struct {
	Role    string `json:"role,omitempty"`
	Content string `json:"content,omitempty"`
}
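
// VisionMessages is a message whose content is a list of blocks, allowing
// text and base64-encoded images to be mixed in a single turn.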
type VisionMessages struct {
	Role    string          `json:"role,omitempty"`
	Content []VisionContent `json:"content,omitempty"`
}

type VisionContent struct {
	Type   string        `json:"type,omitempty"`
	Source *VisionSource `json:"source,omitempty"`
	Text   string        `json:"text,omitempty"`
}

type VisionSource struct {
	Type      string `json:"type,omitempty"`
	MediaType string `json:"media_type,omitempty"`
	Data      string `json:"data,omitempty"`
}
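
// ChatResponse models a non-streaming Messages API response.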
type ChatResponse struct {
	ID           string `json:"id"`
	Type         string `json:"type"`
	Role         string `json:"role"`
	Model        string `json:"model"`
	StopSequence any    `json:"stop_sequence"`
	Usage        struct {
		InputTokens  int `json:"input_tokens"`
		OutputTokens int `json:"output_tokens"`
	} `json:"usage"`
	Content []struct {
		Type string `json:"type"`
		Text string `json:"text"`
	} `json:"content"`
	StopReason string `json:"stop_reason"`
}
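
// ClaudeStreamResponse models a single server-sent event from the streaming
// Messages API (e.g. message_start, content_block_delta, message_delta, error).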
type ClaudeStreamResponse struct {
	Type         string `json:"type"`
	Index        int    `json:"index"`
	ContentBlock struct {
		Type string `json:"type"`
		Text string `json:"text"`
	} `json:"content_block"`
	Delta struct {
		Type         string `json:"type"`
		Text         string `json:"text"`
		StopReason   string `json:"stop_reason"`
		StopSequence any    `json:"stop_sequence"`
	} `json:"delta"`
	Message struct {
		ID           string `json:"id"`
		Type         string `json:"type"`
		Role         string `json:"role"`
		Content      []any  `json:"content"`
		Model        string `json:"model"`
		StopReason   string `json:"stop_reason"`
		StopSequence any    `json:"stop_sequence"`
		Usage        struct {
			InputTokens  int `json:"input_tokens"`
			OutputTokens int `json:"output_tokens"`
		} `json:"usage"`
	} `json:"message"`
	Error struct {
		Type    string `json:"type"`
		Message string `json:"message"`
	} `json:"error"`
	Usage struct {
		OutputTokens int `json:"output_tokens"`
	} `json:"usage"`
}
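
// Claude bundles the request context, upstream API key and token accounting
// used by the proxy handlers.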
type Claude struct {
	Ctx        context.Context
	ApiKey     *model.ApiKey
	tokenUsage *llm.TokenUsage
	Done       chan struct{}
}
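
// NewClaude constructs a Claude client bound to the given context and API key.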
func NewClaude(ctx context.Context, apiKey *model.ApiKey) (*Claude, error) {
	return &Claude{
		Ctx:        ctx,
		ApiKey:     apiKey,
		tokenUsage: &llm.TokenUsage{},
		Done:       make(chan struct{}),
	}, nil
}
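
// Chat is not implemented yet; it always returns (nil, nil).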
func (c *Claude) Chat(ctx context.Context, chatReq llm.ChatRequest) (*llm.ChatResponse, error) {
	return nil, nil
}
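
// StreamChat is not implemented yet; it always returns (nil, nil).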
func (c *Claude) StreamChat(ctx context.Context, chatReq llm.ChatRequest) (chan *llm.StreamChatResponse, error) {
	return nil, nil
}