Documentation
Index
Constants
This section is empty.
Variables
var DefaultArguments = Arguments{
	MetricsPath:      "/metrics",
	Scheme:           "http",
	HonorLabels:      false,
	HonorTimestamps:  true,
	HTTPClientConfig: component_config.DefaultHTTPClientConfig,
	ScrapeInterval:   1 * time.Minute,
	ScrapeTimeout:    10 * time.Second,
}
DefaultArguments defines the default settings for a scrape job.
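Components typically start from these defaults and override only the settings that differ per job. The following is a minimal sketch; the import path, job name, and override values are assumptions for illustration, not part of this package's documented API.

package main

import (
	"fmt"
	"time"

	// Import path assumed from the component name; adjust to your module layout.
	"github.com/grafana/agent/component/prometheus/scrape"
)

func main() {
	// Copy the package defaults, then override only what this job needs.
	args := scrape.DefaultArguments
	args.JobName = "my_app"                // hypothetical job name
	args.ScrapeInterval = 30 * time.Second // tighter than the 1m default
	args.MetricsPath = "/internal/metrics" // non-default metrics endpoint

	fmt.Println(args.JobName, args.ScrapeInterval, args.MetricsPath)
}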
Functions
This section is empty.
Types
type Arguments
type Arguments struct {
	Targets   []discovery.Target   `river:"targets,attr"`
	ForwardTo []storage.Appendable `river:"forward_to,attr"`
	// The job name to override the job label with.
	JobName string `river:"job_name,attr,optional"`
	// Indicates whether the scraped metrics should remain unmodified.
	HonorLabels bool `river:"honor_labels,attr,optional"`
	// Indicates whether the scraped timestamps should be respected.
	HonorTimestamps bool `river:"honor_timestamps,attr,optional"`
	// A set of query parameters with which the target is scraped.
	Params url.Values `river:"params,attr,optional"`
	// How frequently to scrape the targets of this scrape config.
	ScrapeInterval time.Duration `river:"scrape_interval,attr,optional"`
	// The timeout for scraping targets of this config.
	ScrapeTimeout time.Duration `river:"scrape_timeout,attr,optional"`
	// The HTTP resource path on which to fetch metrics from targets.
	MetricsPath string `river:"metrics_path,attr,optional"`
	// The URL scheme with which to fetch metrics from targets.
	Scheme string `river:"scheme,attr,optional"`
	// An uncompressed response body larger than this many bytes will cause the
	// scrape to fail. 0 means no limit.
	BodySizeLimit units.Base2Bytes `river:"body_size_limit,attr,optional"`
	// More than this many samples post metric-relabeling will cause the scrape
	// to fail.
	SampleLimit uint `river:"sample_limit,attr,optional"`
	// More than this many targets after the target relabeling will cause the
	// scrapes to fail.
	TargetLimit uint `river:"target_limit,attr,optional"`
	// More than this many labels post metric-relabeling will cause the scrape
	// to fail.
	LabelLimit uint `river:"label_limit,attr,optional"`
	// A label name longer than this post metric-relabeling will cause the
	// scrape to fail.
	LabelNameLengthLimit uint `river:"label_name_length_limit,attr,optional"`
	// A label value longer than this post metric-relabeling will cause the
	// scrape to fail.
	LabelValueLengthLimit uint `river:"label_value_length_limit,attr,optional"`
	HTTPClientConfig component_config.HTTPClientConfig `river:",squash"`
	// Scrape Options
	ExtraMetrics bool `river:"extra_metrics,attr,optional"`
}
Arguments holds values which are used to configure the prometheus.scrape component.
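The Flow runtime normally decodes these fields from a River block, but they can also be populated programmatically. A hedged sketch follows, assuming discovery.Target is a string-keyed label map (as in the agent's discovery package) and that the shown import paths match the module layout.

package example

import (
	// Paths assumed from the component layout; adjust to your module.
	"github.com/grafana/agent/component/discovery"
	"github.com/grafana/agent/component/prometheus/scrape"
	"github.com/prometheus/prometheus/storage"
)

// buildArgs is a hypothetical helper showing how the main fields fit together.
func buildArgs(receivers []storage.Appendable) scrape.Arguments {
	args := scrape.DefaultArguments
	args.JobName = "demo" // hypothetical job name

	// Targets usually come from discovery components; a literal static target
	// is used here for illustration, assuming a map-style discovery.Target.
	args.Targets = []discovery.Target{
		{"__address__": "localhost:9090"},
	}

	// Scraped samples are appended to every receiver listed in forward_to.
	args.ForwardTo = receivers
	return args
}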
func (*Arguments) UnmarshalRiver
func (arg *Arguments) UnmarshalRiver(f func(interface{}) error) error
UnmarshalRiver implements river.Unmarshaler.
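This follows the common pattern for river.Unmarshaler implementations: set the receiver to DefaultArguments so unset attributes keep their defaults, then decode the user's block over the top. The body below is a hedged sketch of that pattern rather than the component's actual code; the interval/timeout check is an assumed validation, and fmt is assumed to be imported.

func (arg *Arguments) UnmarshalRiver(f func(interface{}) error) error {
	// Apply defaults before decoding so omitted attributes keep their defaults.
	*arg = DefaultArguments

	type arguments Arguments // local alias so decoding does not recurse into this method
	if err := f((*arguments)(arg)); err != nil {
		return err
	}

	// Assumed validation: a timeout longer than the interval can never be met.
	if arg.ScrapeTimeout > arg.ScrapeInterval {
		return fmt.Errorf("scrape_timeout (%s) must not be greater than scrape_interval (%s)", arg.ScrapeTimeout, arg.ScrapeInterval)
	}
	return nil
}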
type Component
type Component struct {
	// contains filtered or unexported fields
}
Component implements the prometheus.scrape component.
func New
func New(o component.Options, args Arguments) (*Component, error)
New creates a new prometheus.scrape component.
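The Flow controller normally calls New when the component is instantiated from configuration; calling it manually (for example in tests) only requires the documented signature. A minimal hedged sketch, where opts is assumed to be populated by the surrounding runtime and fmt and the component packages are assumed imported:

// newScraper is a hypothetical wrapper around the documented constructor.
// component.Options carries runtime dependencies and is not constructed here.
func newScraper(opts component.Options, args scrape.Arguments) (*scrape.Component, error) {
	c, err := scrape.New(opts, args)
	if err != nil {
		return nil, fmt.Errorf("creating prometheus.scrape component: %w", err)
	}
	return c, nil
}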
func (*Component) DebugInfo
func (c *Component) DebugInfo() interface{}
DebugInfo implements component.DebugComponent.
type ScraperStatus
type ScraperStatus struct {
	TargetStatus []TargetStatus `river:"target,block,optional"`
}
ScraperStatus reports the status of the scraper's jobs.
type TargetStatus
type TargetStatus struct {
	JobName            string            `river:"job,attr"`
	URL                string            `river:"url,attr"`
	Health             string            `river:"health,attr"`
	Labels             map[string]string `river:"labels,attr"`
	LastError          string            `river:"last_error,attr,optional"`
	LastScrape         time.Time         `river:"last_scrape,attr"`
	LastScrapeDuration time.Duration     `river:"last_scrape_duration,attr,optional"`
}
TargetStatus reports on the status of the latest scrape for a target.
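This is the structure surfaced through DebugInfo above: the component's debug info can be type-asserted and its targets inspected. A hedged sketch, assuming the value returned by DebugInfo is a ScraperStatus:

// unhealthyTargets is a hypothetical helper that returns the targets whose
// last scrape was not healthy, using the types documented above.
func unhealthyTargets(c *scrape.Component) []scrape.TargetStatus {
	status, ok := c.DebugInfo().(scrape.ScraperStatus) // assumed concrete type
	if !ok {
		return nil
	}
	var bad []scrape.TargetStatus
	for _, t := range status.TargetStatus {
		if t.Health != "up" { // "up" is the healthy value reported by Prometheus scrape pools
			bad = append(bad, t)
		}
	}
	return bad
}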