// Package llm defines the abstraction over chat-capable language-model
// backends and the shared state a concrete backend implementation carries.
package llm

import (
	"context"

	"opencatd-open/internal/model"
)

// LLM is the contract every language-model backend must satisfy:
// blocking chat, streaming chat, and token-usage reporting.
type LLM interface {
	// Chat sends req and blocks until the complete response is available.
	Chat(ctx context.Context, req ChatRequest) (*ChatResponse, error)
	// StreamChat sends req and returns a channel on which partial
	// responses are delivered as they arrive. NOTE(review): ownership of
	// closing the channel is not visible here — presumably the
	// implementation (sender) closes it; confirm against implementers.
	StreamChat(ctx context.Context, req ChatRequest) (chan *StreamChatResponse, error)
	// GetTokenUsage returns the backend's token-usage record.
	GetTokenUsage() *TokenUsage
}

// llm bundles per-backend state shared by concrete implementations.
// (ChatRequest, ChatResponse, StreamChatResponse and TokenUsage are
// declared elsewhere in this package.)
type llm struct {
	ApiKey   *model.ApiKey // credential used to authenticate upstream calls
	Usage    *model.Usage  // usage/accounting record for this backend
	tools    any           // TODO: tool/function-call definitions — concrete type pending
	Messages []any         // TODO: conversation history — concrete type pending
	llm      LLM           // the wrapped backend implementation
}