Documentation ¶
Index ¶
Constants ¶
This section is empty.
Variables ¶
var DefaultConfig = Config{
	UseSASLHandshake:        true,
	KafkaVersion:            sarama.V2_0_0_0.String(),
	MetadataRefreshInterval: "1m",
	AllowConcurrent:         true,
	MaxOffsets:              1000,
	PruneIntervalSeconds:    30,
	TopicsFilter:            ".*",
	GroupFilter:             ".*",
}
DefaultConfig holds the default settings for the kafka_exporter integration.
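A minimal sketch of using these defaults programmatically: copy DefaultConfig and override only the fields that differ for a given cluster. The import path, broker addresses, and topic filter below are placeholders assumed for illustration.

package main

import (
	"fmt"

	"github.com/grafana/agent/pkg/integrations/kafka_exporter"
)

func main() {
	// Start from the package defaults, then override what differs.
	cfg := kafka_exporter.DefaultConfig
	cfg.KafkaURIs = []string{"kafka-1:9092", "kafka-2:9092"}
	cfg.TopicsFilter = "^orders-.*"

	fmt.Println(cfg.Name(), cfg.KafkaURIs)
}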
Functions ¶
Types ¶
type Config ¶
type Config struct {
	// Address array (host:port) of Kafka servers.
	KafkaURIs []string `yaml:"kafka_uris,omitempty"`
	// Connect using SASL/PLAIN.
	UseSASL bool `yaml:"use_sasl,omitempty"`
	// Only set this to false if using a non-Kafka SASL proxy.
	UseSASLHandshake bool `yaml:"use_sasl_handshake,omitempty"`
	// SASL user name.
	SASLUsername string `yaml:"sasl_username,omitempty"`
	// SASL user password.
	SASLPassword config_util.Secret `yaml:"sasl_password,omitempty"`
	// The SASL SCRAM SHA algorithm to use as the mechanism: sha256 or sha512.
	SASLMechanism string `yaml:"sasl_mechanism,omitempty"`
	// Connect using TLS.
	UseTLS bool `yaml:"use_tls,omitempty"`
	// The optional certificate authority file for TLS client authentication.
	CAFile string `yaml:"ca_file,omitempty"`
	// The optional certificate file for TLS client authentication.
	CertFile string `yaml:"cert_file,omitempty"`
	// The optional key file for TLS client authentication.
	KeyFile string `yaml:"key_file,omitempty"`
	// If true, the server's certificate will not be checked for validity.
	// This makes your HTTPS connections insecure.
	InsecureSkipVerify bool `yaml:"insecure_skip_verify,omitempty"`
	// Kafka broker version.
	KafkaVersion string `yaml:"kafka_version,omitempty"`
	// Set to true if you need to read consumer group lag from ZooKeeper.
	UseZooKeeperLag bool `yaml:"use_zookeeper_lag,omitempty"`
	// Address array (hosts) of ZooKeeper servers.
	ZookeeperURIs []string `yaml:"zookeeper_uris,omitempty"`
	// Kafka cluster name.
	ClusterName string `yaml:"kafka_cluster_name,omitempty"`
	// Metadata refresh interval.
	MetadataRefreshInterval string `yaml:"metadata_refresh_interval,omitempty"`
	// If true, all scrapes trigger Kafka operations; otherwise, they share results.
	// WARN: This should be disabled on large clusters.
	AllowConcurrent bool `yaml:"allow_concurrency,omitempty"`
	// Maximum number of offsets to store in the interpolation table for a partition.
	MaxOffsets int `yaml:"max_offsets,omitempty"`
	// How frequently the interpolation table should be pruned, in seconds.
	PruneIntervalSeconds int `yaml:"prune_interval_seconds,omitempty"`
	// Regex filter for topics to be monitored.
	TopicsFilter string `yaml:"topics_filter_regex,omitempty"`
	// Regex filter for consumer groups to be monitored.
	GroupFilter string `yaml:"groups_filter_regex,omitempty"`
}
Config controls the kafka_exporter integration.
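The yaml struct tags above define the keys used when this block is decoded from YAML. A minimal sketch of decoding such a block with gopkg.in/yaml.v2; the import path of this package is assumed, and the broker address, credentials, and topic filter are placeholders.

package main

import (
	"fmt"

	"github.com/grafana/agent/pkg/integrations/kafka_exporter"
	"gopkg.in/yaml.v2"
)

func main() {
	// YAML keys correspond to the struct tags above; values are placeholders.
	raw := []byte(`
kafka_uris: ["kafka-1:9092"]
use_sasl: true
sasl_username: monitor
sasl_password: hunter2
topics_filter_regex: "^orders-.*"
`)

	var cfg kafka_exporter.Config
	if err := yaml.Unmarshal(raw, &cfg); err != nil {
		panic(err)
	}

	// Keys left unset are expected to keep the values from DefaultConfig
	// (see UnmarshalYAML below).
	fmt.Println(cfg.KafkaURIs, cfg.KafkaVersion, cfg.TopicsFilter)
}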
func (*Config) InstanceKey ¶ added in v0.21.0
func (c *Config) InstanceKey(agentKey string) (string, error)
InstanceKey returns the hostname:port of the first Kafka node, if any. If there is not exactly one Kafka node, the user must manually provide their own value for instance key in the common config.
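A short usage sketch under the single-broker case described above; the broker address and agent key argument are placeholders, and the error path is assumed to cover configurations without exactly one Kafka node.

package main

import (
	"fmt"

	"github.com/grafana/agent/pkg/integrations/kafka_exporter"
)

func main() {
	cfg := kafka_exporter.DefaultConfig
	cfg.KafkaURIs = []string{"kafka-1:9092"} // exactly one broker (placeholder)

	// With a single broker, the returned key should be that broker's host:port.
	key, err := cfg.InstanceKey("agent-host:12345")
	if err != nil {
		panic(err)
	}
	fmt.Println(key) // kafka-1:9092
}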
func (*Config) Name ¶
func (c *Config) Name() string
Name returns the name of the integration that this config represents.
func (*Config) NewIntegration ¶
func (c *Config) NewIntegration(logger log.Logger) (integrations.Integration, error)
NewIntegration creates a new kafka_exporter integration.
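A hedged sketch of constructing the integration directly, assuming the log.Logger parameter is the go-kit logger interface; the import paths and broker address are assumptions for illustration.

package main

import (
	"os"

	"github.com/go-kit/log"
	"github.com/grafana/agent/pkg/integrations/kafka_exporter"
)

func main() {
	cfg := kafka_exporter.DefaultConfig
	cfg.KafkaURIs = []string{"kafka-1:9092"} // placeholder broker

	// A logfmt logger writing to stderr; any go-kit log.Logger would do.
	logger := log.NewLogfmtLogger(os.Stderr)

	integration, err := cfg.NewIntegration(logger)
	if err != nil {
		// For example: an invalid configuration or unreachable brokers.
		panic(err)
	}
	_ = integration
}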
func (*Config) UnmarshalYAML ¶
func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error
UnmarshalYAML implements yaml.Unmarshaler for Config
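The page does not show the method body, but a common Go pattern for such Unmarshalers (and a plausible assumption here, given DefaultConfig above) is to seed the value with the defaults before decoding, so omitted keys keep their default values. The sketch below uses a hypothetical stand-in type, not this package's Config.

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// exampleConfig is a stand-in type illustrating the usual
// defaults-then-decode pattern; it is not this package's Config.
type exampleConfig struct {
	MetadataRefreshInterval string `yaml:"metadata_refresh_interval,omitempty"`
	MaxOffsets              int    `yaml:"max_offsets,omitempty"`
}

var exampleDefaults = exampleConfig{
	MetadataRefreshInterval: "1m",
	MaxOffsets:              1000,
}

func (c *exampleConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
	// Seed with defaults so omitted keys keep their default values.
	*c = exampleDefaults

	// The alias type drops the UnmarshalYAML method and avoids recursion.
	type plain exampleConfig
	return unmarshal((*plain)(c))
}

func main() {
	var c exampleConfig
	if err := yaml.Unmarshal([]byte("max_offsets: 500\n"), &c); err != nil {
		panic(err)
	}
	fmt.Println(c.MetadataRefreshInterval, c.MaxOffsets) // 1m 500
}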