Documentation ¶
Overview ¶
Argument types let functions describe their inputs and outputs.
Index ¶
- Variables
- func IntPositive(e *expr) error
- func IsAggFunc(e *expr) error
- func IsConsolFunc(e *expr) error
- func IsIntervalString(e *expr) error
- func IsOperator(e *expr) error
- func NewAggregateConstructor(name string) func() GraphiteFunc
- func NewConsolidateByConstructor(by string) func() GraphiteFunc
- func NewFilterSeriesConstructor(fn string, operator string) func() GraphiteFunc
- func NewGroupByNodesConstructor(groupByNode bool) func() GraphiteFunc
- func NewHighestLowestConstructor(fn string, highest bool) func() GraphiteFunc
- func NewRemoveAboveBelowPercentileConstructor(above bool) func() GraphiteFunc
- func NewRemoveAboveBelowValueConstructor(above bool) func() GraphiteFunc
- func NewSortByConstructor(fn string, reverse bool) func() GraphiteFunc
- func NonNegativePercent(e *expr) error
- func Normalize(dataMap DataMap, in []models.Series) []models.Series
- func NormalizeTo(dataMap DataMap, in models.Series, interval uint32) models.Series
- func NormalizeTwo(dataMap DataMap, a, b models.Series) (models.Series, models.Series)
- func Parse(e string) (*expr, string, error)
- func ParseMany(targets []string) ([]*expr, error)
- func Pool(p *sync.Pool)
- func SortSeriesWithConsolidator(series []models.Series, c consolidation.Consolidator, reverse bool)
- func WithinZeroOneInclusiveInterval(e *expr) error
- type Arg
- type ArgBool
- type ArgFloat
- type ArgIn
- type ArgInt
- type ArgInts
- type ArgQuotelessString
- type ArgRegex
- type ArgSeries
- type ArgSeriesList
- type ArgSeriesLists
- type ArgString
- type ArgStringOrInt
- type ArgStrings
- type ArgStringsOrInts
- type Context
- type DataMap
- type ErrBadArgument
- type ErrBadArgumentStr
- type ErrBadKwarg
- type ErrBadRegex
- type ErrKwargSpecifiedTwice
- type ErrUnknownFunction
- type ErrUnknownKwarg
- type FuncAbsolute
- type FuncAggregate
- type FuncAlias
- type FuncAliasByMetric
- type FuncAliasByNode
- type FuncAliasSub
- type FuncAsPercent
- type FuncConsolidateBy
- type FuncConstantLine
- type FuncCountSeries
- type FuncDerivative
- type FuncDivideSeries
- type FuncDivideSeriesLists
- type FuncFallbackSeries
- type FuncFilterSeries
- type FuncGet
- type FuncGrep
- type FuncGroup
- type FuncGroupByNodes
- type FuncGroupByTags
- type FuncHighestLowest
- type FuncIntegral
- type FuncInvert
- type FuncIsNonNull
- type FuncKeepLastValue
- type FuncMinMax
- type FuncMovingAverage
- type FuncNonNegativeDerivative
- type FuncOffset
- type FuncPerSecond
- type FuncRemoveAboveBelowPercentile
- type FuncRemoveAboveBelowValue
- type FuncRemoveEmptySeries
- type FuncRound
- type FuncScale
- type FuncScaleToSeconds
- type FuncSmartSummarize
- type FuncSortBy
- type FuncSortByName
- type FuncSummarize
- type FuncTransformNull
- type FuncUnique
- type GraphiteFunc
- func NewAbsolute() GraphiteFunc
- func NewAggregate() GraphiteFunc
- func NewAlias() GraphiteFunc
- func NewAliasByMetric() GraphiteFunc
- func NewAliasByNode() GraphiteFunc
- func NewAliasSub() GraphiteFunc
- func NewAsPercent() GraphiteFunc
- func NewConsolidateBy() GraphiteFunc
- func NewConstantLine() GraphiteFunc
- func NewCountSeries() GraphiteFunc
- func NewDerivative() GraphiteFunc
- func NewDivideSeries() GraphiteFunc
- func NewDivideSeriesLists() GraphiteFunc
- func NewExclude() GraphiteFunc
- func NewFallbackSeries() GraphiteFunc
- func NewFilterSeries() GraphiteFunc
- func NewGet(req Req) GraphiteFunc
- func NewGrep() GraphiteFunc
- func NewGroup() GraphiteFunc
- func NewGroupByTags() GraphiteFunc
- func NewIntegral() GraphiteFunc
- func NewInvert() GraphiteFunc
- func NewIsNonNull() GraphiteFunc
- func NewKeepLastValue() GraphiteFunc
- func NewMinMax() GraphiteFunc
- func NewMovingAverage() GraphiteFunc
- func NewNonNegativeDerivative() GraphiteFunc
- func NewOffset() GraphiteFunc
- func NewPerSecond() GraphiteFunc
- func NewRemoveEmptySeries() GraphiteFunc
- func NewRound() GraphiteFunc
- func NewScale() GraphiteFunc
- func NewScaleToSeconds() GraphiteFunc
- func NewSmartSummarize() GraphiteFunc
- func NewSortByName() GraphiteFunc
- func NewSummarize() GraphiteFunc
- func NewTransformNull() GraphiteFunc
- func NewUnique() GraphiteFunc
- type MetricRequest
- type Optimizations
- type Plan
- type Req
- type ScoredSeries
- type Validator
Constants ¶
This section is empty.
Variables ¶
var (
	ErrMissingArg          = errors.NewBadRequest("argument missing")
	ErrTooManyArg          = errors.NewBadRequest("too many arguments")
	ErrMissingTimeseries   = errors.NewBadRequest("missing time series argument")
	ErrWildcardNotAllowed  = errors.NewBadRequest("found wildcard where series expected")
	ErrMissingExpr         = errors.NewBadRequest("missing expression")
	ErrMissingComma        = errors.NewBadRequest("missing comma")
	ErrMissingQuote        = errors.NewBadRequest("missing quote")
	ErrUnexpectedCharacter = errors.NewBadRequest("unexpected character")
	ErrIllegalCharacter    = errors.NewBadRequest("illegal character for function name")
)
var ErrIntPositive = errors.NewBadRequest("integer must be positive")
var ErrInvalidAggFunc = errors.NewBadRequest("Invalid aggregation func")
var ErrNonNegativePercent = errors.NewBadRequest("The requested percent is required to be greater than 0")
var ErrWithinZeroOneInclusiveInterval = errors.NewBadRequest("value must lie within interval [0,1]")
Functions ¶
func IntPositive ¶
func IntPositive(e *expr) error
IntPositive validates whether an int is positive (greater than zero)
func IsAggFunc ¶
func IsAggFunc(e *expr) error
func IsConsolFunc ¶
func IsConsolFunc(e *expr) error
func IsIntervalString ¶
func IsIntervalString(e *expr) error
func IsOperator ¶
func IsOperator(e *expr) error
func NewAggregateConstructor ¶
func NewAggregateConstructor(name string) func() GraphiteFunc
NewAggregateConstructor takes an agg string and returns a constructor function
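A minimal usage sketch (assuming "sum" is one of the supported aggregation names):
newSum := expr.NewAggregateConstructor("sum")
fn := newSum() // fn is a GraphiteFunc performing the "sum" aggregation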
func NewConsolidateByConstructor ¶ added in v0.12.0
func NewConsolidateByConstructor(by string) func() GraphiteFunc
func NewFilterSeriesConstructor ¶ added in v0.12.0
func NewFilterSeriesConstructor(fn string, operator string) func() GraphiteFunc
func NewGroupByNodesConstructor ¶ added in v1.0.0
func NewGroupByNodesConstructor(groupByNode bool) func() GraphiteFunc
func NewHighestLowestConstructor ¶
func NewHighestLowestConstructor(fn string, highest bool) func() GraphiteFunc
func NewRemoveAboveBelowPercentileConstructor ¶
func NewRemoveAboveBelowPercentileConstructor(above bool) func() GraphiteFunc
func NewRemoveAboveBelowValueConstructor ¶
func NewRemoveAboveBelowValueConstructor(above bool) func() GraphiteFunc
func NewSortByConstructor ¶
func NewSortByConstructor(fn string, reverse bool) func() GraphiteFunc
func NonNegativePercent ¶
func NonNegativePercent(e *expr) error
func Normalize ¶ added in v1.0.0
func Normalize(dataMap DataMap, in []models.Series) []models.Series
Normalize normalizes the series to the same common LCM interval, if they don't already have the same interval. Any adjusted series is created in a slice drawn out of the pool and is added to the dataMap so it can be reclaimed.
func NormalizeTo ¶ added in v1.0.0
func NormalizeTo(dataMap DataMap, in models.Series, interval uint32) models.Series
NormalizeTo normalizes the given series to the desired interval. It will pad the front and strip from the back as needed, to assure the output is canonical for the given interval. The following MUST be true when calling this:
* interval > in.Interval
* interval % in.Interval == 0
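A worked example of the preconditions, as a sketch (dataMap and in are assumed to already exist): an input series with in.Interval == 10 may be normalized to interval 30, since 30 > 10 and 30 % 10 == 0; a request for interval 25 would violate the second condition.
out := expr.NormalizeTo(dataMap, in, 30)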
func NormalizeTwo ¶ added in v1.0.0
func NormalizeTwo(dataMap DataMap, a, b models.Series) (models.Series, models.Series)
func Parse ¶
func Parse(e string) (*expr, string, error)
Parse parses an expression string and turns it into an expression. It also returns any leftover data that could not be parsed.
func ParseMany ¶
func ParseMany(targets []string) ([]*expr, error)
ParseMany parses a slice of strings into a slice of expressions (recursively). Not included: validation that requested functions exist, that correct args are passed, etc.
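A minimal parsing sketch; the returned expressions are opaque values to hand to NewPlan, and per the note above, unknown functions or bad arguments are not caught at this stage:
exprs, err := expr.ParseMany([]string{
	"sum(foo.bar.*)",
	`alias(foo.baz, "baz")`,
})
if err != nil {
	// parse-level problems only, e.g. ErrMissingQuote or ErrMissingComma
}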
func Pool ¶
func Pool(p *sync.Pool)
Pool tells the expr library which pool to use for temporary []schema.Point. This lets the expr package effectively create and drop point slices as needed. It is recommended you use the same pool in your application, e.g. to get slices when loading the initial data, and to return the buffers back to the pool once the output from this package's processing is no longer needed.
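A minimal wiring sketch, assuming point slices are []schema.Point from github.com/grafana/metrictank/schema and that 1024 is a reasonable starting capacity for your workload:
pointSlicePool := &sync.Pool{
	New: func() interface{} { return make([]schema.Point, 0, 1024) },
}
expr.Pool(pointSlicePool)
// use pointSlicePool.Get()/Put() in your own fetch path as well, so fetch
// buffers and expr's intermediate buffers are recycled through one pool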
func SortSeriesWithConsolidator ¶
func SortSeriesWithConsolidator(series []models.Series, c consolidation.Consolidator, reverse bool)
func WithinZeroOneInclusiveInterval ¶ added in v1.0.0
func WithinZeroOneInclusiveInterval(e *expr) error
Types ¶
type Arg ¶
Arg is an argument to a GraphiteFunc. Note how every implementation has a val property; it should point to a value accessible to the function. The value will be set up by the planner, which assures that by the time Func.Exec() is called, the function has access to all needed inputs, whether simple values or, in the case of ArgSeries* inputs, other functions to call which will feed it data.
type ArgFloat ¶
type ArgFloat struct {
// contains filtered or unexported fields
}
ArgFloat is a floating point number, potentially with decimals
type ArgIn ¶
type ArgIn struct {
// contains filtered or unexported fields
}
ArgIn is a special type that allows one of multiple arguments
type ArgInt ¶
type ArgInt struct {
// contains filtered or unexported fields
}
ArgInt is a number without decimals
type ArgInts ¶
type ArgInts struct {
// contains filtered or unexported fields
}
ArgInts represents one or more numbers without decimals
type ArgQuotelessString ¶ added in v0.13.1
type ArgQuotelessString struct {
// contains filtered or unexported fields
}
ArgQuotelessString is used when an argument is a special value without quotes (such as None or INF). It should NOT be used together with ArgSeries, ArgSeriesList, or ArgSeriesLists inside an ArgIn, as that causes ambiguity
func (ArgQuotelessString) Key ¶ added in v0.13.1
func (a ArgQuotelessString) Key() string
func (ArgQuotelessString) Optional ¶ added in v0.13.1
func (a ArgQuotelessString) Optional() bool
type ArgRegex ¶
type ArgRegex struct {
// contains filtered or unexported fields
}
ArgRegex is like a string, but should result in a regex
type ArgSeries ¶
type ArgSeries struct {
// contains filtered or unexported fields
}
ArgSeries is a single series argument. It is not generally used as input, since graphite functions typically take multiple series as input, but it is useful to describe output
type ArgSeriesList ¶
type ArgSeriesList struct {
// contains filtered or unexported fields
}
ArgSeriesList is a list-of-series argument; it can be 0..N series
func (ArgSeriesList) Key ¶
func (a ArgSeriesList) Key() string
func (ArgSeriesList) Optional ¶
func (a ArgSeriesList) Optional() bool
type ArgSeriesLists ¶
type ArgSeriesLists struct {
// contains filtered or unexported fields
}
ArgSeriesLists represents one or more lists of series inputs.
func (ArgSeriesLists) Key ¶
func (a ArgSeriesLists) Key() string
func (ArgSeriesLists) Optional ¶
func (a ArgSeriesLists) Optional() bool
type ArgStringOrInt ¶ added in v1.0.0
type ArgStringOrInt struct {
// contains filtered or unexported fields
}
ArgStringOrInt is a mixed string-or-int argument
func (ArgStringOrInt) Key ¶ added in v1.0.0
func (a ArgStringOrInt) Key() string
func (ArgStringOrInt) Optional ¶ added in v1.0.0
func (a ArgStringOrInt) Optional() bool
type ArgStrings ¶
type ArgStrings struct {
// contains filtered or unexported fields
}
ArgStrings represents one or more strings
func (ArgStrings) Key ¶
func (a ArgStrings) Key() string
func (ArgStrings) Optional ¶
func (a ArgStrings) Optional() bool
type ArgStringsOrInts ¶
type ArgStringsOrInts struct {
// contains filtered or unexported fields
}
ArgStringsOrInts is an array of mixed strings or ints
func (ArgStringsOrInts) Key ¶
func (a ArgStringsOrInts) Key() string
func (ArgStringsOrInts) Optional ¶
func (a ArgStringsOrInts) Optional() bool
type Context ¶
type Context struct {
	PNGroup models.PNGroup // pre-normalization group. if the data can be safely pre-normalized
	MDP     uint32         // if we can MDP-optimize, reflects runtime consolidation MaxDataPoints. 0 otherwise
	// contains filtered or unexported fields
}
Context describes a series timeframe and consolidator
type DataMap ¶ added in v1.0.0
DataMap contains all series to feed into the processing chain, or generated therein:
* fetched series, grouped by their expr.Req, such that expr.FuncGet can find the data it needs and feed it into subsequent expr.GraphiteFunc functions
* additional series generated while handling the request (e.g. function processing, normalization), keyed by an empty expr.Req (such that they can't be mistakenly picked up by FuncGet)
All of these series will need to be returned to the pool once we're done with all processing and have generated our response body, which is done by calling Clean().
Eventually we'd like to be able to reuse intermediately computed data, e.g. for queries like target=movingAvg(sum(foo), 10)&target=sum(foo), but for now we don't support this.
func NewDataMap ¶ added in v1.0.0
func NewDataMap() DataMap
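A minimal lifecycle sketch, relying on the Clean() method referenced in the DataMap description above:
dm := expr.NewDataMap()
// ... execute the plan; functions add any series they create to dm ...
dm.Clean() // return all accumulated point slices to the pool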
func (DataMap) CheckForOverlappingPoints ¶ added in v1.0.0
CheckForOverlappingPoints runs through all series in the pool and makes sure there are no series that are overlapping (otherwise returning them would cause issues). This is not efficient and should probably only be called from tests
type ErrBadArgument ¶
type ErrBadArgument struct {
// contains filtered or unexported fields
}
func (ErrBadArgument) Error ¶
func (e ErrBadArgument) Error() string
func (ErrBadArgument) HTTPStatusCode ¶ added in v1.0.0
func (e ErrBadArgument) HTTPStatusCode() int
type ErrBadArgumentStr ¶
type ErrBadArgumentStr struct {
// contains filtered or unexported fields
}
func (ErrBadArgumentStr) Error ¶
func (e ErrBadArgumentStr) Error() string
func (ErrBadArgumentStr) HTTPStatusCode ¶ added in v1.0.0
func (e ErrBadArgumentStr) HTTPStatusCode() int
type ErrBadKwarg ¶
type ErrBadKwarg struct {
// contains filtered or unexported fields
}
func (ErrBadKwarg) Error ¶
func (e ErrBadKwarg) Error() string
func (ErrBadKwarg) HTTPStatusCode ¶ added in v1.0.0
func (e ErrBadKwarg) HTTPStatusCode() int
type ErrBadRegex ¶ added in v1.0.0
type ErrBadRegex struct {
// contains filtered or unexported fields
}
func (ErrBadRegex) HTTPStatusCode ¶ added in v1.0.0
func (e ErrBadRegex) HTTPStatusCode() int
type ErrKwargSpecifiedTwice ¶
type ErrKwargSpecifiedTwice struct {
// contains filtered or unexported fields
}
func (ErrKwargSpecifiedTwice) Error ¶
func (e ErrKwargSpecifiedTwice) Error() string
func (ErrKwargSpecifiedTwice) HTTPStatusCode ¶ added in v1.0.0
func (e ErrKwargSpecifiedTwice) HTTPStatusCode() int
type ErrUnknownFunction ¶
type ErrUnknownFunction string
func (ErrUnknownFunction) Error ¶
func (e ErrUnknownFunction) Error() string
func (ErrUnknownFunction) HTTPStatusCode ¶ added in v1.0.0
func (e ErrUnknownFunction) HTTPStatusCode() int
type ErrUnknownKwarg ¶
type ErrUnknownKwarg struct {
// contains filtered or unexported fields
}
func (ErrUnknownKwarg) Error ¶
func (e ErrUnknownKwarg) Error() string
func (ErrUnknownKwarg) HTTPStatusCode ¶ added in v1.0.0
func (e ErrUnknownKwarg) HTTPStatusCode() int
type FuncAbsolute ¶ added in v0.12.0
type FuncAbsolute struct {
// contains filtered or unexported fields
}
func (*FuncAbsolute) Context ¶ added in v0.12.0
func (s *FuncAbsolute) Context(context Context) Context
func (*FuncAbsolute) Exec ¶ added in v0.12.0
func (s *FuncAbsolute) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncAbsolute) Signature ¶ added in v0.12.0
func (s *FuncAbsolute) Signature() ([]Arg, []Arg)
type FuncAggregate ¶
type FuncAggregate struct {
// contains filtered or unexported fields
}
func (*FuncAggregate) Context ¶
func (s *FuncAggregate) Context(context Context) Context
func (*FuncAggregate) Signature ¶
func (s *FuncAggregate) Signature() ([]Arg, []Arg)
type FuncAliasByMetric ¶ added in v1.0.0
type FuncAliasByMetric struct {
// contains filtered or unexported fields
}
func (*FuncAliasByMetric) Context ¶ added in v1.0.0
func (s *FuncAliasByMetric) Context(context Context) Context
func (*FuncAliasByMetric) Exec ¶ added in v1.0.0
func (s *FuncAliasByMetric) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncAliasByMetric) Signature ¶ added in v1.0.0
func (s *FuncAliasByMetric) Signature() ([]Arg, []Arg)
type FuncAliasByNode ¶
type FuncAliasByNode struct {
// contains filtered or unexported fields
}
func (*FuncAliasByNode) Context ¶
func (s *FuncAliasByNode) Context(context Context) Context
func (*FuncAliasByNode) Exec ¶
func (s *FuncAliasByNode) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncAliasByNode) Signature ¶
func (s *FuncAliasByNode) Signature() ([]Arg, []Arg)
type FuncAliasSub ¶
type FuncAliasSub struct {
// contains filtered or unexported fields
}
func (*FuncAliasSub) Context ¶
func (s *FuncAliasSub) Context(context Context) Context
func (*FuncAliasSub) Signature ¶
func (s *FuncAliasSub) Signature() ([]Arg, []Arg)
type FuncAsPercent ¶
type FuncAsPercent struct {
// contains filtered or unexported fields
}
func (*FuncAsPercent) Context ¶
func (s *FuncAsPercent) Context(context Context) Context
func (*FuncAsPercent) Signature ¶
func (s *FuncAsPercent) Signature() ([]Arg, []Arg)
type FuncConsolidateBy ¶
type FuncConsolidateBy struct {
// contains filtered or unexported fields
}
func (*FuncConsolidateBy) Context ¶
func (s *FuncConsolidateBy) Context(context Context) Context
func (*FuncConsolidateBy) Exec ¶
func (s *FuncConsolidateBy) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncConsolidateBy) Signature ¶
func (s *FuncConsolidateBy) Signature() ([]Arg, []Arg)
type FuncConstantLine ¶ added in v1.0.0
type FuncConstantLine struct {
// contains filtered or unexported fields
}
func (*FuncConstantLine) Context ¶ added in v1.0.0
func (s *FuncConstantLine) Context(context Context) Context
func (*FuncConstantLine) Exec ¶ added in v1.0.0
func (s *FuncConstantLine) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncConstantLine) Signature ¶ added in v1.0.0
func (s *FuncConstantLine) Signature() ([]Arg, []Arg)
type FuncCountSeries ¶
type FuncCountSeries struct {
// contains filtered or unexported fields
}
func (*FuncCountSeries) Context ¶
func (s *FuncCountSeries) Context(context Context) Context
func (*FuncCountSeries) Exec ¶
func (s *FuncCountSeries) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncCountSeries) Signature ¶
func (s *FuncCountSeries) Signature() ([]Arg, []Arg)
type FuncDerivative ¶
type FuncDerivative struct {
// contains filtered or unexported fields
}
func (*FuncDerivative) Context ¶
func (s *FuncDerivative) Context(context Context) Context
func (*FuncDerivative) Exec ¶
func (s *FuncDerivative) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncDerivative) Signature ¶
func (s *FuncDerivative) Signature() ([]Arg, []Arg)
type FuncDivideSeries ¶
type FuncDivideSeries struct {
// contains filtered or unexported fields
}
FuncDivideSeries divides 1-N dividend series by 1 divisor series
func (*FuncDivideSeries) Context ¶
func (s *FuncDivideSeries) Context(context Context) Context
func (*FuncDivideSeries) Exec ¶
func (s *FuncDivideSeries) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncDivideSeries) Signature ¶
func (s *FuncDivideSeries) Signature() ([]Arg, []Arg)
type FuncDivideSeriesLists ¶
type FuncDivideSeriesLists struct {
// contains filtered or unexported fields
}
FuncDivideSeriesLists divides dividends by divisors, pairwise
func (*FuncDivideSeriesLists) Context ¶
func (s *FuncDivideSeriesLists) Context(context Context) Context
func (*FuncDivideSeriesLists) Exec ¶
func (s *FuncDivideSeriesLists) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncDivideSeriesLists) Signature ¶
func (s *FuncDivideSeriesLists) Signature() ([]Arg, []Arg)
type FuncFallbackSeries ¶ added in v0.13.0
type FuncFallbackSeries struct {
// contains filtered or unexported fields
}
func (*FuncFallbackSeries) Context ¶ added in v0.13.0
func (s *FuncFallbackSeries) Context(context Context) Context
func (*FuncFallbackSeries) Exec ¶ added in v0.13.0
func (s *FuncFallbackSeries) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncFallbackSeries) Signature ¶ added in v0.13.0
func (s *FuncFallbackSeries) Signature() ([]Arg, []Arg)
type FuncFilterSeries ¶
type FuncFilterSeries struct {
// contains filtered or unexported fields
}
func (*FuncFilterSeries) Context ¶
func (s *FuncFilterSeries) Context(context Context) Context
func (*FuncFilterSeries) Exec ¶
func (s *FuncFilterSeries) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncFilterSeries) Signature ¶
func (s *FuncFilterSeries) Signature() ([]Arg, []Arg)
type FuncGet ¶
type FuncGet struct {
// contains filtered or unexported fields
}
FuncGet is an internal function, just for getting data
type FuncGroup ¶ added in v0.13.0
type FuncGroup struct {
// contains filtered or unexported fields
}
type FuncGroupByNodes ¶ added in v1.0.0
type FuncGroupByNodes struct {
// contains filtered or unexported fields
}
func (*FuncGroupByNodes) Context ¶ added in v1.0.0
func (s *FuncGroupByNodes) Context(context Context) Context
func (*FuncGroupByNodes) Exec ¶ added in v1.0.0
func (s *FuncGroupByNodes) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncGroupByNodes) Signature ¶ added in v1.0.0
func (s *FuncGroupByNodes) Signature() ([]Arg, []Arg)
type FuncGroupByTags ¶
type FuncGroupByTags struct {
// contains filtered or unexported fields
}
func (*FuncGroupByTags) Context ¶
func (s *FuncGroupByTags) Context(context Context) Context
func (*FuncGroupByTags) Exec ¶
func (s *FuncGroupByTags) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncGroupByTags) Signature ¶
func (s *FuncGroupByTags) Signature() ([]Arg, []Arg)
type FuncHighestLowest ¶
type FuncHighestLowest struct {
// contains filtered or unexported fields
}
func (*FuncHighestLowest) Context ¶
func (s *FuncHighestLowest) Context(context Context) Context
func (*FuncHighestLowest) Exec ¶
func (s *FuncHighestLowest) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncHighestLowest) Signature ¶
func (s *FuncHighestLowest) Signature() ([]Arg, []Arg)
type FuncIntegral ¶ added in v0.13.0
type FuncIntegral struct {
// contains filtered or unexported fields
}
func (*FuncIntegral) Context ¶ added in v0.13.0
func (s *FuncIntegral) Context(context Context) Context
func (*FuncIntegral) Exec ¶ added in v0.13.0
func (s *FuncIntegral) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncIntegral) Signature ¶ added in v0.13.0
func (s *FuncIntegral) Signature() ([]Arg, []Arg)
type FuncInvert ¶ added in v1.0.0
type FuncInvert struct {
// contains filtered or unexported fields
}
func (*FuncInvert) Context ¶ added in v1.0.0
func (s *FuncInvert) Context(context Context) Context
func (*FuncInvert) Exec ¶ added in v1.0.0
func (s *FuncInvert) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncInvert) Signature ¶ added in v1.0.0
func (s *FuncInvert) Signature() ([]Arg, []Arg)
type FuncIsNonNull ¶
type FuncIsNonNull struct {
// contains filtered or unexported fields
}
func (*FuncIsNonNull) Context ¶
func (s *FuncIsNonNull) Context(context Context) Context
func (*FuncIsNonNull) Signature ¶
func (s *FuncIsNonNull) Signature() ([]Arg, []Arg)
type FuncKeepLastValue ¶
type FuncKeepLastValue struct {
// contains filtered or unexported fields
}
func (*FuncKeepLastValue) Context ¶
func (s *FuncKeepLastValue) Context(context Context) Context
func (*FuncKeepLastValue) Exec ¶
func (s *FuncKeepLastValue) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncKeepLastValue) Signature ¶
func (s *FuncKeepLastValue) Signature() ([]Arg, []Arg)
type FuncMinMax ¶ added in v1.0.0
type FuncMinMax struct {
// contains filtered or unexported fields
}
func (*FuncMinMax) Context ¶ added in v1.0.0
func (s *FuncMinMax) Context(context Context) Context
func (*FuncMinMax) Exec ¶ added in v1.0.0
func (s *FuncMinMax) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncMinMax) Signature ¶ added in v1.0.0
func (s *FuncMinMax) Signature() ([]Arg, []Arg)
type FuncMovingAverage ¶
type FuncMovingAverage struct {
// contains filtered or unexported fields
}
func (*FuncMovingAverage) Context ¶
func (s *FuncMovingAverage) Context(context Context) Context
func (*FuncMovingAverage) Exec ¶
func (s *FuncMovingAverage) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncMovingAverage) Signature ¶
func (s *FuncMovingAverage) Signature() ([]Arg, []Arg)
Note: if the input is 1 series, then the output is too; not sure how to communicate that
type FuncNonNegativeDerivative ¶
type FuncNonNegativeDerivative struct {
// contains filtered or unexported fields
}
func (*FuncNonNegativeDerivative) Context ¶
func (s *FuncNonNegativeDerivative) Context(context Context) Context
func (*FuncNonNegativeDerivative) Exec ¶
func (s *FuncNonNegativeDerivative) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncNonNegativeDerivative) Signature ¶
func (s *FuncNonNegativeDerivative) Signature() ([]Arg, []Arg)
type FuncOffset ¶ added in v1.0.0
type FuncOffset struct {
// contains filtered or unexported fields
}
func (*FuncOffset) Context ¶ added in v1.0.0
func (s *FuncOffset) Context(context Context) Context
func (*FuncOffset) Exec ¶ added in v1.0.0
func (s *FuncOffset) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncOffset) Signature ¶ added in v1.0.0
func (s *FuncOffset) Signature() ([]Arg, []Arg)
type FuncPerSecond ¶
type FuncPerSecond struct {
// contains filtered or unexported fields
}
func (*FuncPerSecond) Context ¶
func (s *FuncPerSecond) Context(context Context) Context
func (*FuncPerSecond) Signature ¶
func (s *FuncPerSecond) Signature() ([]Arg, []Arg)
type FuncRemoveAboveBelowPercentile ¶
type FuncRemoveAboveBelowPercentile struct {
// contains filtered or unexported fields
}
func (*FuncRemoveAboveBelowPercentile) Context ¶
func (s *FuncRemoveAboveBelowPercentile) Context(context Context) Context
func (*FuncRemoveAboveBelowPercentile) Exec ¶
func (s *FuncRemoveAboveBelowPercentile) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncRemoveAboveBelowPercentile) Signature ¶
func (s *FuncRemoveAboveBelowPercentile) Signature() ([]Arg, []Arg)
type FuncRemoveAboveBelowValue ¶
type FuncRemoveAboveBelowValue struct {
// contains filtered or unexported fields
}
func (*FuncRemoveAboveBelowValue) Context ¶
func (s *FuncRemoveAboveBelowValue) Context(context Context) Context
func (*FuncRemoveAboveBelowValue) Exec ¶
func (s *FuncRemoveAboveBelowValue) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncRemoveAboveBelowValue) Signature ¶
func (s *FuncRemoveAboveBelowValue) Signature() ([]Arg, []Arg)
type FuncRemoveEmptySeries ¶ added in v1.0.0
type FuncRemoveEmptySeries struct {
// contains filtered or unexported fields
}
func (*FuncRemoveEmptySeries) Context ¶ added in v1.0.0
func (s *FuncRemoveEmptySeries) Context(context Context) Context
func (*FuncRemoveEmptySeries) Exec ¶ added in v1.0.0
func (s *FuncRemoveEmptySeries) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncRemoveEmptySeries) Signature ¶ added in v1.0.0
func (s *FuncRemoveEmptySeries) Signature() ([]Arg, []Arg)
type FuncRound ¶ added in v1.0.0
type FuncRound struct {
// contains filtered or unexported fields
}
type FuncScaleToSeconds ¶
type FuncScaleToSeconds struct {
// contains filtered or unexported fields
}
func (*FuncScaleToSeconds) Context ¶
func (s *FuncScaleToSeconds) Context(context Context) Context
func (*FuncScaleToSeconds) Exec ¶
func (s *FuncScaleToSeconds) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncScaleToSeconds) Signature ¶
func (s *FuncScaleToSeconds) Signature() ([]Arg, []Arg)
type FuncSmartSummarize ¶
type FuncSmartSummarize struct {
// contains filtered or unexported fields
}
func (*FuncSmartSummarize) Context ¶
func (s *FuncSmartSummarize) Context(context Context) Context
func (*FuncSmartSummarize) Exec ¶
func (s *FuncSmartSummarize) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncSmartSummarize) Signature ¶
func (s *FuncSmartSummarize) Signature() ([]Arg, []Arg)
type FuncSortBy ¶
type FuncSortBy struct {
// contains filtered or unexported fields
}
func (*FuncSortBy) Context ¶
func (s *FuncSortBy) Context(context Context) Context
func (*FuncSortBy) Signature ¶
func (s *FuncSortBy) Signature() ([]Arg, []Arg)
type FuncSortByName ¶
type FuncSortByName struct {
// contains filtered or unexported fields
}
func (*FuncSortByName) Context ¶
func (s *FuncSortByName) Context(context Context) Context
func (*FuncSortByName) Exec ¶
func (s *FuncSortByName) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncSortByName) Signature ¶
func (s *FuncSortByName) Signature() ([]Arg, []Arg)
type FuncSummarize ¶
type FuncSummarize struct {
// contains filtered or unexported fields
}
func (*FuncSummarize) Context ¶
func (s *FuncSummarize) Context(context Context) Context
func (*FuncSummarize) Signature ¶
func (s *FuncSummarize) Signature() ([]Arg, []Arg)
type FuncTransformNull ¶
type FuncTransformNull struct {
// contains filtered or unexported fields
}
func (*FuncTransformNull) Context ¶
func (s *FuncTransformNull) Context(context Context) Context
func (*FuncTransformNull) Exec ¶
func (s *FuncTransformNull) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncTransformNull) Signature ¶
func (s *FuncTransformNull) Signature() ([]Arg, []Arg)
type FuncUnique ¶ added in v1.0.0
type FuncUnique struct {
// contains filtered or unexported fields
}
func (*FuncUnique) Context ¶ added in v1.0.0
func (s *FuncUnique) Context(context Context) Context
func (*FuncUnique) Exec ¶ added in v1.0.0
func (s *FuncUnique) Exec(dataMap DataMap) ([]models.Series, error)
func (*FuncUnique) Signature ¶ added in v1.0.0
func (s *FuncUnique) Signature() ([]Arg, []Arg)
type GraphiteFunc ¶
type GraphiteFunc interface {
	// Signature declares input and output arguments (return values).
	// Input args can be optional, in which case they can be specified positionally, or via keys if you want to specify params that come after un-specified optional params.
	// The val pointers of each input Arg should point to a location accessible to the function,
	// so that the planner can set up the inputs for your function based on user input.
	// NewPlan() will only create the plan if the expressions it parsed correspond to the signatures provided by the function.
	Signature() ([]Arg, []Arg)

	// Context allows a func to alter the context that will be passed down the expression tree.
	// This function will be called after validating and setting up all non-series and non-serieslist parameters
	// (as typically, context alterations require integer/string/bool/etc parameters, and shall affect series[list] parameters).
	// Examples:
	// * movingAverage(foo,5min) -> the 5min arg will be parsed, so we can request 5min of earlier data, which will affect the request for foo.
	// * consolidateBy(bar, "sum") -> the "sum" arg will be parsed, so we can pass on the fact that bar needs to be sum-consolidated.
	Context(c Context) Context

	// Exec executes the function. The function should call any input functions, do its processing, and return output.
	// IMPORTANT: for performance and correctness, functions should
	// * not modify slices of points that they get from their inputs
	// * use the pool to get new slices in which to store any new/modified data
	// * add the newly created slices into the dataMap so they can be reclaimed after the output is consumed
	// * not modify other properties on their input series, such as the Tags map or Meta
	Exec(dataMap DataMap) ([]models.Series, error)
}
GraphiteFunc defines a graphite processing function
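A hypothetical in-package sketch of an implementation (the val fields of the Arg types are unexported, so this pattern only works inside the expr package; FuncNoop is an invented name): a function that takes one series list and passes it through unchanged.
type FuncNoop struct {
	in GraphiteFunc
}

func NewNoop() GraphiteFunc {
	return &FuncNoop{}
}

func (s *FuncNoop) Signature() ([]Arg, []Arg) {
	// one required serieslist input; the planner stores the resolved
	// input function into s.in via the val pointer
	return []Arg{ArgSeriesList{val: &s.in}}, []Arg{ArgSeriesList{}}
}

func (s *FuncNoop) Context(context Context) Context {
	return context // nothing to alter
}

func (s *FuncNoop) Exec(dataMap DataMap) ([]models.Series, error) {
	series, err := s.in.Exec(dataMap)
	// no point slices are created or modified here, so there is
	// nothing new to add to the dataMap
	return series, err
}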
func NewAbsolute ¶ added in v0.12.0
func NewAbsolute() GraphiteFunc
func NewAggregate ¶ added in v1.0.0
func NewAggregate() GraphiteFunc
func NewAlias ¶
func NewAlias() GraphiteFunc
func NewAliasByMetric ¶ added in v1.0.0
func NewAliasByMetric() GraphiteFunc
func NewAliasByNode ¶
func NewAliasByNode() GraphiteFunc
func NewAliasSub ¶
func NewAliasSub() GraphiteFunc
func NewAsPercent ¶
func NewAsPercent() GraphiteFunc
func NewConsolidateBy ¶
func NewConsolidateBy() GraphiteFunc
func NewConstantLine ¶ added in v1.0.0
func NewConstantLine() GraphiteFunc
func NewCountSeries ¶
func NewCountSeries() GraphiteFunc
func NewDerivative ¶
func NewDerivative() GraphiteFunc
func NewDivideSeries ¶
func NewDivideSeries() GraphiteFunc
func NewDivideSeriesLists ¶
func NewDivideSeriesLists() GraphiteFunc
func NewExclude ¶
func NewExclude() GraphiteFunc
func NewFallbackSeries ¶ added in v0.13.0
func NewFallbackSeries() GraphiteFunc
func NewFilterSeries ¶
func NewFilterSeries() GraphiteFunc
func NewGet ¶
func NewGet(req Req) GraphiteFunc
func NewGrep ¶
func NewGrep() GraphiteFunc
func NewGroup ¶ added in v0.13.0
func NewGroup() GraphiteFunc
func NewGroupByTags ¶
func NewGroupByTags() GraphiteFunc
func NewIntegral ¶ added in v0.13.0
func NewIntegral() GraphiteFunc
func NewInvert ¶ added in v1.0.0
func NewInvert() GraphiteFunc
func NewIsNonNull ¶
func NewIsNonNull() GraphiteFunc
func NewKeepLastValue ¶
func NewKeepLastValue() GraphiteFunc
func NewMinMax ¶ added in v1.0.0
func NewMinMax() GraphiteFunc
func NewMovingAverage ¶
func NewMovingAverage() GraphiteFunc
func NewNonNegativeDerivative ¶
func NewNonNegativeDerivative() GraphiteFunc
func NewOffset ¶ added in v1.0.0
func NewOffset() GraphiteFunc
func NewPerSecond ¶
func NewPerSecond() GraphiteFunc
func NewRemoveEmptySeries ¶ added in v1.0.0
func NewRemoveEmptySeries() GraphiteFunc
func NewRound ¶ added in v1.0.0
func NewRound() GraphiteFunc
func NewScale ¶
func NewScale() GraphiteFunc
func NewScaleToSeconds ¶
func NewScaleToSeconds() GraphiteFunc
func NewSmartSummarize ¶
func NewSmartSummarize() GraphiteFunc
func NewSortByName ¶
func NewSortByName() GraphiteFunc
func NewSummarize ¶
func NewSummarize() GraphiteFunc
func NewTransformNull ¶
func NewTransformNull() GraphiteFunc
func NewUnique ¶ added in v1.0.0
func NewUnique() GraphiteFunc
type MetricRequest ¶
type Optimizations ¶ added in v1.0.0
func (Optimizations) ApplyUserPrefs ¶ added in v1.0.0
func (o Optimizations) ApplyUserPrefs(s string) (Optimizations, error)
type Plan ¶
type Plan struct {
	Reqs          []Req // data that needs to be fetched before functions can be executed
	MaxDataPoints uint32
	From          uint32 // global request scoped from
	To            uint32 // global request scoped to
	// contains filtered or unexported fields
}
func NewPlan ¶
func NewPlan(exprs []*expr, from, to, mdp uint32, stable bool, optimizations Optimizations) (Plan, error)
NewPlan validates the expressions and comes up with the initial (potentially non-optimal) execution plan, which is just a list of requests and the expressions. It traverses the tree, and as it goes down it:
* makes sure the function exists
* validates arguments
* allows functions to modify the Context (change data range or consolidation)
* future version: allow functions to mark whether it is safe to pre-aggregate using consolidateBy or not
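A minimal planning sketch, assuming from and to are uint32 unix timestamps, an 800-point MaxDataPoints budget, and default (zero-value) Optimizations:
exprs, err := expr.ParseMany([]string{"movingAverage(app.*.requests, '5min')"})
if err != nil {
	// handle parse error
}
plan, err := expr.NewPlan(exprs, from, to, 800, true, expr.Optimizations{})
if err != nil {
	// e.g. ErrUnknownFunction, ErrBadKwarg
}
// plan.Reqs now lists the series that must be fetched before execution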
type Req ¶
type Req struct {
	Query   string // whatever was parsed as the query out of a graphite target. e.g. target=sum(foo.{b,a}r.*) -> foo.{b,a}r.* -> this will go straight to index lookup
	From    uint32
	To      uint32
	Cons    consolidation.Consolidator // can be 0 to mean undefined
	PNGroup models.PNGroup
	MDP     uint32 // if we can MDP-optimize, reflects runtime consolidation MaxDataPoints. 0 otherwise.
}
Req represents a request for one/more series
func NewReq ¶
func NewReq(query string, from, to uint32, cons consolidation.Consolidator, PNGroup models.PNGroup, MDP uint32) Req
NewReq creates a new Req. Pass cons=0 to leave the consolidator undefined, leaving it up to the caller (in graphite's case, it would cause a lookup into storage-aggregation.conf)
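A minimal sketch (cons=0 defers the consolidator choice as described above; PNGroup 0 and MDP 0 mean no pre-normalization group and no MDP optimization):
req := expr.NewReq("foo.{b,a}r.*", from, to, 0, 0, 0)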
func NewReqFromContext ¶ added in v1.0.0
func NewReqFromSerie ¶ added in v1.0.0
NewReqFromSerie generates a Req back from a series. A models.Series has all the properties attached to it to find out which Req it came from
type ScoredSeries ¶
type ScoredSeries struct {
// contains filtered or unexported fields
}
Source Files ¶
- datamap.go
- expr.go
- exprtype_string.go
- func_absolute.go
- func_aggregate.go
- func_alias.go
- func_aliasbymetric.go
- func_aliasbynode.go
- func_aliassub.go
- func_aspercent.go
- func_consolidateby.go
- func_constantline.go
- func_countseries.go
- func_derivative.go
- func_divideseries.go
- func_divideserieslists.go
- func_fallbackSeries.go
- func_filterseries.go
- func_get.go
- func_grep.go
- func_group.go
- func_groupbynodes.go
- func_groupbytags.go
- func_highestlowest.go
- func_integral.go
- func_invert.go
- func_isnonnull.go
- func_keeplastvalue.go
- func_minmax.go
- func_movingaverage.go
- func_nonnegativederivative.go
- func_offset.go
- func_persecond.go
- func_removeabovebelowpercentile.go
- func_removeabovebelowvalue.go
- func_removeemptyseries.go
- func_round.go
- func_scale.go
- func_scaletoseconds.go
- func_smartsummarize.go
- func_sortby.go
- func_sortbyname.go
- func_summarize.go
- func_transformnull.go
- func_unique.go
- funcs.go
- normalize.go
- parse.go
- plan.go
- pool.go
- seriesaggregators.go
- test.go
- types.go
- validator.go
- xfilesfactor.go