Documentation
Index
- Constants
- func DetectLogFormat(line string) string
- type Chunk
- type Chunks
- type Config
- type DedupingTokenizer
- type Drain
- func (d *Drain) Clusters() []*LogCluster
- func (d *Drain) Delete(cluster *LogCluster)
- func (d *Drain) Prune()
- func (d *Drain) Train(content string, ts int64) *LogCluster
- func (d *Drain) TrainPattern(content string, samples []*logproto.PatternSample) *LogCluster
- func (d *Drain) TrainTokens(tokens []string, stringer func([]string) string, ts int64) *LogCluster
- type Limits
- type LineTokenizer
- type LogCluster
- type LogClusterCache
- type Metrics
- type Node
Constants
const (
	FormatLogfmt  = "logfmt"
	FormatJSON    = "json"
	FormatUnknown = "unknown"
)
const (
	TimeResolution = model.Time(int64(time.Second*10) / 1e6)
)
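TimeResolution works out to model.Time(10000): ten seconds in nanoseconds divided by 1e6 yields milliseconds, which is the unit of prometheus/common's model.Time. A minimal sketch of bucketing a timestamp to that resolution; the constant is restated locally for self-containment, and the truncation helper is illustrative, not part of this package:

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

func main() {
	// Same value as drain.TimeResolution: 10s in nanoseconds / 1e6 = 10000 ms.
	const timeResolution = model.Time(int64(time.Second*10) / 1e6)

	// Illustrative bucketing: round a timestamp down to the nearest
	// 10-second boundary.
	ts := model.TimeFromUnixNano(time.Now().UnixNano())
	fmt.Println(ts - ts%timeResolution)
}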
Variables
This section is empty.
Functions
func DetectLogFormat added in v3.2.0
func DetectLogFormat(line string) string
DetectLogFormat uses simple heuristics to guess how a stream's log lines are encoded. It runs only on the first log line when a new stream is created, so it can afford more complex parsing or regex matching than a per-line check could.
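A minimal usage sketch; the import path follows the Loki v3 module layout and, like the sample line, is an assumption rather than something stated on this page:

package main

import (
	"fmt"

	"github.com/grafana/loki/v3/pkg/pattern/drain" // assumed import path
)

func main() {
	line := `level=info msg="scrape complete" duration=1.2s`
	switch drain.DetectLogFormat(line) {
	case drain.FormatLogfmt:
		fmt.Println("logfmt line")
	case drain.FormatJSON:
		fmt.Println("JSON line")
	default: // drain.FormatUnknown
		fmt.Println("unknown format")
	}
}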
Types
type Config
type Config struct {
	LogClusterDepth      int
	SimTh                float64
	MaxChildren          int
	ExtraDelimiters      []string
	MaxClusters          int
	ParamString          string
	MaxEvictionRatio     float64
	MaxAllowedLineLength int
	// contains filtered or unexported fields
}
func DefaultConfig
func DefaultConfig() *Config
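A hedged sketch of starting from DefaultConfig and overriding a few exported fields; the particular values are illustrative, and the actual defaults are not shown on this page:

package example

import "github.com/grafana/loki/v3/pkg/pattern/drain" // assumed import path

func newConfig() *drain.Config {
	// Start from the package defaults, then cap cluster growth.
	cfg := drain.DefaultConfig()
	cfg.MaxClusters = 1024          // illustrative value
	cfg.MaxAllowedLineLength = 4096 // illustrative value
	return cfg
}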
type DedupingTokenizer added in v3.2.0
type DedupingTokenizer struct {
	LineTokenizer
	// contains filtered or unexported fields
}
type Drain
type Drain struct {
	// contains filtered or unexported fields
}
func New
func New(tenantID string, config *Config, limits Limits, format string, metrics *Metrics) *Drain
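A sketch wiring the pieces together: a Config, a Limits implementation (see the Limits sketch further down), and a format from DetectLogFormat, then Train from the index above. Whether a nil *Metrics is accepted is an assumption; a real deployment would pass proper metrics. The timestamp unit is also an assumption:

package example

import "github.com/grafana/loki/v3/pkg/pattern/drain" // assumed import path

// noLimits is a stand-in Limits implementation (illustrative only).
type noLimits struct{}

func (noLimits) PatternIngesterTokenizableJSONFields(userID string) []string { return nil }

func trainStream(lines []string, tsNanos []int64) *drain.Drain {
	format := drain.FormatUnknown
	if len(lines) > 0 {
		format = drain.DetectLogFormat(lines[0])
	}
	// nil metrics is an assumption; pass a real *drain.Metrics in production.
	d := drain.New("tenant-a", drain.DefaultConfig(), noLimits{}, format, nil)
	for i, line := range lines {
		d.Train(line, tsNanos[i]) // ts assumed to be Unix nanoseconds
	}
	return d
}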
func (*Drain) TrainPattern
func (d *Drain) TrainPattern(content string, samples []*logproto.PatternSample) *LogCluster
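TrainPattern feeds an already-extracted pattern together with its samples. The PatternSample fields used below (Timestamp, Value) are an assumption about logproto and should be verified against that package:

package example

import (
	"github.com/grafana/loki/v3/pkg/logproto"      // assumed import path
	"github.com/grafana/loki/v3/pkg/pattern/drain" // assumed import path
	"github.com/prometheus/common/model"
)

func seedPattern(d *drain.Drain) {
	// Field names are an assumption; check logproto.PatternSample.
	samples := []*logproto.PatternSample{
		{Timestamp: model.Now(), Value: 3},
	}
	d.TrainPattern(`level=info msg="scrape complete" duration=<_>`, samples)
}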
func (*Drain) TrainTokens
func (d *Drain) TrainTokens(tokens []string, stringer func([]string) string, ts int64) *LogCluster
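TrainTokens bypasses the tokenizer and takes pre-split tokens plus a stringer that renders them back into a line. A minimal sketch, again assuming nanosecond timestamps:

package example

import (
	"strings"
	"time"

	"github.com/grafana/loki/v3/pkg/pattern/drain" // assumed import path
)

func trainPreTokenized(d *drain.Drain) *drain.LogCluster {
	tokens := []string{"GET", "/api/v1/query", "200"}
	return d.TrainTokens(tokens, func(t []string) string {
		return strings.Join(t, " ")
	}, time.Now().UnixNano()) // ts assumed to be Unix nanoseconds
}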
type Limits added in v3.3.0
type Limits interface {
	PatternIngesterTokenizableJSONFields(userID string) []string
}
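Limits is satisfied by anything that can name, per tenant, which JSON fields are worth tokenizing. A minimal fixed implementation; the field names are illustrative:

package example

// fixedLimits returns the same tokenizable JSON fields for every tenant.
type fixedLimits struct{}

func (fixedLimits) PatternIngesterTokenizableJSONFields(userID string) []string {
	return []string{"msg", "message", "log"}
}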
type LineTokenizer added in v3.1.0
type LineTokenizer interface {
	Tokenize(line string, tokens []string, state interface{}) ([]string, interface{})
	Join(tokens []string, state interface{}) string
	Clone(tokens []string, state interface{}) ([]string, interface{})
}
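A sketch of a LineTokenizer that splits on whitespace and carries no per-line state; the package's own tokenizers are unexported, so this is illustrative only:

package example

import "strings"

// spaceTokenizer splits on whitespace and keeps no per-line state.
type spaceTokenizer struct{}

func (spaceTokenizer) Tokenize(line string, tokens []string, state interface{}) ([]string, interface{}) {
	// Reuse the caller's token slice, as the tokens parameter suggests.
	return append(tokens[:0], strings.Fields(line)...), nil
}

func (spaceTokenizer) Join(tokens []string, state interface{}) string {
	return strings.Join(tokens, " ")
}

func (spaceTokenizer) Clone(tokens []string, state interface{}) ([]string, interface{}) {
	out := make([]string, len(tokens))
	copy(out, tokens)
	return out, nil
}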
type LogCluster
type LogCluster struct {
	Size       int
	Tokens     []string
	TokenState interface{}
	Stringer   func([]string, interface{}) string
	Chunks     Chunks
	// contains filtered or unexported fields
}
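The exported fields make it straightforward to dump what a trained Drain has learned. A sketch using Clusters() from the index above; the nil check on Stringer is defensive, since this page does not say whether it is always set:

package example

import (
	"fmt"
	"strings"

	"github.com/grafana/loki/v3/pkg/pattern/drain" // assumed import path
)

func dumpClusters(d *drain.Drain) {
	for _, c := range d.Clusters() {
		pattern := strings.Join(c.Tokens, " ")
		if c.Stringer != nil {
			pattern = c.Stringer(c.Tokens, c.TokenState)
		}
		fmt.Printf("%6d  %s\n", c.Size, pattern)
	}
}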
type LogClusterCache
type LogClusterCache struct {
	// contains filtered or unexported fields
}