Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion tools/pika_exporter/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ cat << EOF | gofmt > version.go
package main

const (
PikaExporterVersion = "$(PIKA_EXPORTER_MAJOR).$(PIKA_EXPORTER_MINOR).$(PIKA_EXPORTER_PATCH)"
PikaExporterVersion = "$(PIKA_EXPORTER_MAJOR).$(PIKA_EXPORTER_MINOR).$(PIKA_EXPORTER_PATCH)"
BuildVersion = "$(BRANCH)"
BuildCommitSha = "$(GITREV)"
BuildDate = "$(BUILDTIME)"
Expand Down
2 changes: 1 addition & 1 deletion tools/pika_exporter/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ prometheus --config.file=./grafana/prometheus.yml
| check.scan-count | PIKA_EXPORTER_CHECK_SCAN_COUNT | 100 | When check keys and executing SCAN command, scan-count assigned to COUNT. | --check.scan-count 200 |
| web.listen-address | PIKA_EXPORTER_WEB_LISTEN_ADDRESS | :9121 | Address to listen on for web interface and telemetry. | --web.listen-address ":9121" |
| web.telemetry-path | PIKA_EXPORTER_WEB_TELEMETRY_PATH | /metrics | Path under which to expose metrics. | --web.telemetry-path "/metrics" |
| log.level | PIKA_EXPORTER_LOG_LEVEL | info | Log level, valid options:`panic` `fatal` `error` `warn` `warning` `info` `debug`. | --log.level "debug" |
| log.level | PIKA_EXPORTER_LOG_LEVEL | error | Log level, valid options:`panic` `fatal` `error` `warn` `warning` `info` `debug`. | --log.level "debug" |
| log.format | PIKA_EXPORTER_LOG_FORMAT | json | Log format, valid options:`txt` `json`. | --log.format "json" |
| version | | false | Show version information and exit. | --version |

Expand Down
54 changes: 51 additions & 3 deletions tools/pika_exporter/config/info.toml
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
# Pika Exporter Configuration
server = true
data = true
clients = true
Expand All @@ -7,6 +8,53 @@ replication = true
keyspace = true
cache = true

execcount = false
commandstats = false
rocksdb = false
execcount = true
commandstats = true
rocksdb = true


# The address of the pika instance to monitor
# If not set, the exporter will use the discovery mechanism
# pika_addr = "127.0.0.1:9379"

# The password for the pika instance
# pika_password = ""

# The alias for the pika instance
# pika_alias = ""

# The address of the codis topom
# codis_addr = "http://127.0.0.1:18087"

# The namespace for the metrics
# namespace = "pika"

# The path to the metrics definition file
# metrics_file = ""

# The address to listen on for the web interface and telemetry
# web_listen_address = ":9121"

# The path under which to expose metrics
# web_telemetry_path = "/metrics"

# The log level
# log_level = "info"

# The log format
# log_format = "text"

# The path to the config file
# config = "config/info.toml"

# The key space stats clock
# keyspace_stats_clock = -1

# The key patterns to check
# check_key_patterns = ""

# The keys to check
# check_keys = ""

# The scan count for checking keys
# check_scan_count = 100
15 changes: 15 additions & 0 deletions tools/pika_exporter/discovery/codis_dashboard.go
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,21 @@ type CmdInfo struct {
Usecs_percall int64 `json:"usecs_percall"`
Fails int64 `json:"fails"`
MaxDelay int64 `json:"max_delay"`
AVG int64 `json:"avg"`
TP90 int64 `json:"tp90"`
TP99 int64 `json:"tp99"`
TP999 int64 `json:"tp999"`
TP9999 int64 `json:"tp9999"`
TP100 int64 `json:"tp100"`

Delay50ms int64 `json:"delay50ms"`
Delay100ms int64 `json:"delay100ms"`
Delay200ms int64 `json:"delay200ms"`
Delay300ms int64 `json:"delay300ms"`
Delay500ms int64 `json:"delay500ms"`
Delay1s int64 `json:"delay1s"`
Delay2s int64 `json:"delay2s"`
Delay3s int64 `json:"delay3s"`
}

type ProxyOpsInfo struct {
Expand Down
9 changes: 6 additions & 3 deletions tools/pika_exporter/exporter/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,8 @@ import (
"strings"
"time"

"github.com/garyburd/redigo/redis"
"github.com/gomodule/redigo/redis"
log "github.com/sirupsen/logrus"
)

const (
Expand Down Expand Up @@ -132,7 +133,8 @@ func (c *client) InfoNoneCommandList() (string, error) {
if flag {
info, err := c.InfoCommand(section)
if err != nil {
return "", err
log.Warnf("Failed to get INFO %s: %v", section, err)
continue // Skip this section but continue with others
}
rst = append(rst, info)
}
Expand Down Expand Up @@ -160,7 +162,8 @@ func (c *client) InfoAllCommandList() (string, error) {
if flag {
info, err := c.InfoCommand(section)
if err != nil {
return "", err
log.Warnf("Failed to get INFO %s: %v", section, err)
continue // Skip this section but continue with others
}
rst = append(rst, info)
}
Expand Down
43 changes: 35 additions & 8 deletions tools/pika_exporter/exporter/conf.go
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,29 @@ type InfoConfig struct {
}

func LoadConfig() error {
log.Println("Update configuration")
err := readConfig(InfoConfigPath)
if err != nil {
return err
log.Debugln("Update configuration")

// Initialize default configuration
InfoConf = &InfoConfig{
Server: true,
Data: true,
Clients: true,
Stats: true,
CPU: true,
Replication: true,
Keyspace: true,
Execcount: true,
Commandstats: true,
Rocksdb: false,
Cache: true,
}

// Try to load config file if path is provided
if InfoConfigPath != "" {
err := readConfig(InfoConfigPath)
if err != nil {
log.Warnf("Failed to load config file %s: %s, using default configuration", InfoConfigPath, err)
}
}

InfoConf.CheckInfo()
Expand Down Expand Up @@ -80,10 +99,18 @@ func (c *InfoConfig) CheckInfo() {
c.InfoAll = false
c.Info = false

if c.Server && c.Data && c.Clients && c.Stats && c.CPU && c.Replication && c.Keyspace {
// For Pika versions, we need to enable Info if any of the core modules are enabled
// This ensures basic metrics are collected
if c.Server || c.Data || c.Clients || c.Stats || c.CPU || c.Replication || c.Keyspace {
c.Info = true
if c.Execcount && c.Commandstats && c.Rocksdb && c.Cache {
c.InfoAll = true
}
}

// InfoAll should only be enabled if all modules are enabled
// For Pika 3.2.x versions, we should NOT use InfoAll because INFO ALL command
// has different output format compared to newer versions
// The version detection will be handled in the exporter, but here we ensure
// that Info is enabled when needed
if c.Info && c.Execcount && c.Commandstats && c.Rocksdb && c.Cache {
c.InfoAll = true
}
}
60 changes: 60 additions & 0 deletions tools/pika_exporter/exporter/metrics/cache.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,16 @@ var collectCacheMetrics = map[string]MetricConfig{
ValueName: "cache_db_num",
},
},
"cache_keys": {
Parser: &normalParser{},
MetricMeta: &MetaData{
Name: "cache_keys",
Help: "pika serve instance cache keys count",
Type: metricTypeGauge,
Labels: []string{LabelNameAddr, LabelNameAlias},
ValueName: "cache_keys",
},
},
"cache_memory": {
Parser: &normalParser{},
MetricMeta: &MetaData{
Expand All @@ -44,6 +54,26 @@ var collectCacheMetrics = map[string]MetricConfig{
ValueName: "cache_memory",
},
},
"hits": {
Parser: &normalParser{},
MetricMeta: &MetaData{
Name: "cache_hits",
Help: "pika serve instance cache hit count",
Type: metricTypeCounter,
Labels: []string{LabelNameAddr, LabelNameAlias},
ValueName: "hits",
},
},
"all_cmds": {
Parser: &normalParser{},
MetricMeta: &MetaData{
Name: "cache_all_cmds",
Help: "pika serve instance cache all commands count",
Type: metricTypeCounter,
Labels: []string{LabelNameAddr, LabelNameAlias},
ValueName: "all_cmds",
},
},
"hits_per_sec": {
Parser: &normalParser{},
MetricMeta: &MetaData{
Expand All @@ -54,6 +84,16 @@ var collectCacheMetrics = map[string]MetricConfig{
ValueName: "hits_per_sec",
},
},
"read_cmd_per_sec": {
Parser: &normalParser{},
MetricMeta: &MetaData{
Name: "read_cmd_per_sec",
Help: "pika serve instance cache read command count per second",
Type: metricTypeGauge,
Labels: []string{LabelNameAddr, LabelNameAlias},
ValueName: "read_cmd_per_sec",
},
},
"hitratio_per_second": {
Parser: &regexParser{
name: "hitratio_per_sec",
Expand Down Expand Up @@ -84,4 +124,24 @@ var collectCacheMetrics = map[string]MetricConfig{
ValueName: "hitratio_all",
},
},
"load_keys_per_sec": {
Parser: &normalParser{},
MetricMeta: &MetaData{
Name: "load_keys_per_sec",
Help: "pika serve instance cache load keys count per second",
Type: metricTypeGauge,
Labels: []string{LabelNameAddr, LabelNameAlias},
ValueName: "load_keys_per_sec",
},
},
"waitting_load_keys_num": {
Parser: &normalParser{},
MetricMeta: &MetaData{
Name: "waitting_load_keys_num",
Help: "pika serve instance cache waiting load keys number",
Type: metricTypeGauge,
Labels: []string{LabelNameAddr, LabelNameAlias},
ValueName: "waitting_load_keys_num",
},
},
}
2 changes: 1 addition & 1 deletion tools/pika_exporter/exporter/metrics/command_exec_count.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ var collectCommandExecCountMetrics = map[string]MetricConfig{
Parser: &regexParser{
name: "command_exec_count_command",
source: "commands_count",
reg: regexp.MustCompile(`(\r|\n)*(?P<command>[^:]+):(?P<count>[\d]*)`),
reg: regexp.MustCompile(`[\r\n]+(?P<command>[^:\r\n]+):(?P<count>[\d]+)`),
Parser: &normalParser{},
},
},
Expand Down
12 changes: 6 additions & 6 deletions tools/pika_exporter/exporter/metrics/keyspace.go
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ var collectKeySpaceMetrics = map[string]MetricConfig{
Parser: &regexParser{
name: "keyspace_info_3.1.0-3.3.2",
reg: regexp.MustCompile(`(?P<db>db[\d]+)\s*(?P<data_type>[^_]+)\w*keys=(?P<keys>[\d]+)[,\s]*` +
`expires=(?P<expire_keys>[\d]+)[,\s]*invaild_keys=(?P<invalid_keys>[\d]+)`),
`expires=(?P<expire_keys>[\d]+)[,\s]*(?:invaild_keys|invalid_keys)=(?P<invalid_keys>[\d]+)`),
Parser: &normalParser{},
},
},
Expand All @@ -49,7 +49,7 @@ var collectKeySpaceMetrics = map[string]MetricConfig{
Parser: &regexParser{
name: "keyspace_info_>=3.1.0",
reg: regexp.MustCompile(`(?P<db>db[\d]+)\s*(?P<data_type>[^_]+)\w*keys=(?P<keys>[\d]+)[,\s]*` +
`expires=(?P<expire_keys>[\d]+)[,\s]*invalid_keys=(?P<invalid_keys>[\d]+)`),
`expires=(?P<expire_keys>[\d]+)[,\s]*(?:invaild_keys|invalid_keys)=(?P<invalid_keys>[\d]+)`),
Parser: &normalParser{},
},
},
Expand Down Expand Up @@ -88,7 +88,7 @@ var collectKeySpaceMetrics = map[string]MetricConfig{
Parser: &regexParser{
name: "keyspace_info_all_~3.0.5",
reg: regexp.MustCompile(`(?P<data_type>\w*):\s*keys=(?P<keys>[\d]+)[,\s]*` +
`expires=(?P<expire_keys>[\d]+)[,\s]*invaild_keys=(?P<invalid_keys>[\d]+)`),
`expires=(?P<expire_keys>[\d]+)[,\s]*(?:invaild_keys|invalid_keys)=(?P<invalid_keys>[\d]+)`),
Parser: &normalParser{},
},
},
Expand All @@ -97,7 +97,7 @@ var collectKeySpaceMetrics = map[string]MetricConfig{
Parser: &regexParser{
name: "keyspace_info_all_~3.1.0",
reg: regexp.MustCompile(`(?P<db>db[\d]+)_\s*(?P<data_type>[^:]+):\s*keys=(?P<keys>[\d]+)[,\s]*` +
`expires=(?P<expire_keys>[\d]+)[,\s]*invaild_keys=(?P<invalid_keys>[\d]+)`),
`expires=(?P<expire_keys>[\d]+)[,\s]*(?:invaild_keys|invalid_keys)=(?P<invalid_keys>[\d]+)`),
Parser: &normalParser{},
},
},
Expand All @@ -106,7 +106,7 @@ var collectKeySpaceMetrics = map[string]MetricConfig{
Parser: &regexParser{
name: "keyspace_info_all_3.1.0-3.3.2",
reg: regexp.MustCompile(`(?P<db>db[\d]+)\s*(?P<data_type>[^_]+)\w*keys=(?P<keys>[\d]+)[,\s]*` +
`expires=(?P<expire_keys>[\d]+)[,\s]*invaild_keys=(?P<invalid_keys>[\d]+)`),
`expires=(?P<expire_keys>[\d]+)[,\s]*(?:invaild_keys|invalid_keys)=(?P<invalid_keys>[\d]+)`),
Parser: &normalParser{},
},
},
Expand All @@ -115,7 +115,7 @@ var collectKeySpaceMetrics = map[string]MetricConfig{
Parser: &regexParser{
name: "keyspace_info_all_>=3.3.3",
reg: regexp.MustCompile(`(?P<db>db[\d]+)\s*(?P<data_type>[^_]+)\w*keys=(?P<keys>[\d]+)[,\s]*` +
`expires=(?P<expire_keys>[\d]+)[,\s]*invalid_keys=(?P<invalid_keys>[\d]+)`),
`expires=(?P<expire_keys>[\d]+)[,\s]*(?:invaild_keys|invalid_keys)=(?P<invalid_keys>[\d]+)`),
Parser: &normalParser{},
},
},
Expand Down
Loading
Loading