Documentation
¶
Index ¶
- Constants
- Variables
- func ListModels() map[ModelEnum]string
- type BaseModel
- func (m *BaseModel) CanStream() bool
- func (m *BaseModel) Complete(conversation *conversation.Conversation, systemPrompt string) error
- func (m *BaseModel) CompleteStreamable(conversation *conversation.Conversation, systemPrompt string) (<-chan StreamResponse, error)
- func (m *BaseModel) Model() ModelEnum
- type IApiClient
- type IModel
- type ModelEnum
- type ProviderStreamParser
- type StreamResponse
Constants ¶
View Source
const DefaultModel = OpenAIMini
Variables ¶
View Source
var ErrEmptyConversation = errors.New("there are no questions to answer")
View Source
var ErrLastQuestionAlreadyAnswered = errors.New("last conversation has already been answered")
View Source
var ErrModelNotFound = errors.New("model not found")
Functions ¶
func ListModels ¶
Types ¶
type BaseModel ¶
type BaseModel struct {
// contains filtered or unexported fields
}
func (*BaseModel) Complete ¶
func (m *BaseModel) Complete(conversation *conversation.Conversation, systemPrompt string) error
func (*BaseModel) CompleteStreamable ¶
func (m *BaseModel) CompleteStreamable(conversation *conversation.Conversation, systemPrompt string) (<-chan StreamResponse, error)
type IApiClient ¶
type IModel ¶
type IModel interface {
	// Complete chat receives a chat history with the last chat being the one that needs to be completed.
	// It also receives a system prompt that can be used to generate the answer.
	// Once succeeded the model will fill out the answer in the last conversation.
	Complete(chatHistory *conversation.Conversation, systemPrompt string) error
	// Returns the model enum
	Model() ModelEnum
}
type ProviderStreamParser ¶
type StreamResponse ¶
Click to show internal directories.
Click to hide internal directories.