Documentation ¶
Index ¶
Constants ¶
const DefaultModel = OpenAIMini
Variables ¶
var ErrEmptyChatHistory = errors.New("Chat history is empty")
var ErrLastChatCompleted = errors.New("Last chat is already completed with answer")
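Callers can distinguish these sentinel errors with errors.Is after a failed CompleteChat call. A minimal sketch; the import paths and package name below are assumptions for illustration, not part of this documentation:

package example

import (
	"errors"
	"fmt"

	// Assumed import paths; substitute the real module paths.
	"example.com/llm/chat"
	"example.com/llm/models"
)

// handleCompletion shows how the two sentinel errors can be told apart.
func handleCompletion(m models.IModel, history *chat.ChatHistory) {
	err := m.CompleteChat(history, "You are a helpful assistant.")
	switch {
	case errors.Is(err, models.ErrEmptyChatHistory):
		fmt.Println("history has no chats; add one before completing")
	case errors.Is(err, models.ErrLastChatCompleted):
		fmt.Println("last chat already answered; append a new chat first")
	case err != nil:
		fmt.Println("completion failed:", err)
	default:
		fmt.Println("answer filled in on the last chat")
	}
}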
Functions ¶
func ListModelsSlug ¶
func ListModelsSlug() []string
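A short usage sketch that prints every available model slug; the import path is an assumption:

package main

import (
	"fmt"

	// Assumed import path; substitute the real module path.
	"example.com/llm/models"
)

func main() {
	for _, slug := range models.ListModelsSlug() {
		fmt.Println(slug)
	}
}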
Types ¶
type BaseModel ¶ added in v0.1.0
type BaseModel struct {
// contains filtered or unexported fields
}
func NewBaseModel ¶ added in v0.1.0
func NewBaseModel(client httpclient.IHttpClient, modelProvider IModelProvider) *BaseModel
func (*BaseModel) CompleteChat ¶ added in v0.1.0
func (m *BaseModel) CompleteChat(chatHistory *chat.ChatHistory, systemPrompt string) error
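A hedged sketch of wiring a BaseModel together; the import paths are assumptions, and the httpclient.IHttpClient and IModelProvider implementations are supplied by the caller:

package example

import (
	"log"

	// Assumed import paths; substitute the real module paths.
	"example.com/llm/chat"
	"example.com/llm/httpclient"
	"example.com/llm/models"
)

// completeWithBaseModel pairs a caller-supplied HTTP client and provider
// and asks the resulting BaseModel to complete the last chat in history.
func completeWithBaseModel(client httpclient.IHttpClient, provider models.IModelProvider, history *chat.ChatHistory) {
	base := models.NewBaseModel(client, provider)
	if err := base.CompleteChat(history, "Answer concisely."); err != nil {
		log.Fatal(err)
	}
	// On success, the answer has been filled in on the last chat of history.
}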
type IModel ¶
type IModel interface {
	// CompleteChat receives a chat history whose last chat is the one that needs to be completed.
	// It also receives a system prompt that can be used to generate the answer.
	// On success, the model fills out the answer in the last chat.
	CompleteChat(chatHistory *chat.ChatHistory, systemPrompt string) error
}
func NewModel ¶
func NewModel(modelType ModelEnum, client httpclient.IHttpClient, config common.Config) IModel
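A minimal sketch of the usual entry point; the import paths are assumptions, and the client, config, and chat history are obtained elsewhere:

package example

import (
	"log"

	// Assumed import paths; substitute the real module paths.
	"example.com/llm/chat"
	"example.com/llm/common"
	"example.com/llm/httpclient"
	"example.com/llm/models"
)

// completeWithDefaultModel builds an IModel for the default model and
// asks it to complete the last chat in history.
func completeWithDefaultModel(client httpclient.IHttpClient, cfg common.Config, history *chat.ChatHistory) {
	model := models.NewModel(models.DefaultModel, client, cfg)
	if err := model.CompleteChat(history, "You are a terse assistant."); err != nil {
		log.Fatal(err)
	}
	// The generated answer now lives on the last chat of history.
}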
type IModelProvider ¶ added in v0.1.0
type IModelProvider interface {
	PreparePayload(chatHistory *chat.ChatHistory, systemPrompt string) (string, error)
	ParseResponse(response []byte) (string, error)
	GetHeaders() map[string]string
	GetUrl() string
}
The Provider is only responsible for preparing the payload, formatting it according to the model's requirements, and parsing the response back to the system.
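A hedged sketch of a custom provider; the import path, payload shape, headers, and URL below are invented for illustration and are not part of this package:

package example

import (
	"encoding/json"

	// Assumed import path; substitute the real module path.
	"example.com/llm/chat"
)

// echoProvider is a hypothetical IModelProvider for a fictional endpoint.
type echoProvider struct {
	apiKey string
}

func (p *echoProvider) PreparePayload(chatHistory *chat.ChatHistory, systemPrompt string) (string, error) {
	// A real provider would serialize the chat history into the model's wire format.
	payload := map[string]any{"system": systemPrompt}
	b, err := json.Marshal(payload)
	return string(b), err
}

func (p *echoProvider) ParseResponse(response []byte) (string, error) {
	// A real provider would extract the answer text from the API's JSON response.
	return string(response), nil
}

func (p *echoProvider) GetHeaders() map[string]string {
	return map[string]string{"Authorization": "Bearer " + p.apiKey}
}

func (p *echoProvider) GetUrl() string {
	return "https://api.example.com/v1/chat" // placeholder URL
}

Such a value can then be passed to NewBaseModel together with an httpclient.IHttpClient implementation.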