Skip to content
Snippets Groups Projects
Commit 3a2c9d55 authored by jurgenhaas's avatar jurgenhaas
Browse files

Add HaProxy monitoring to NetData

parent 1fbfead0
No related branches found
No related tags found
No related merge requests found
# -*- coding: utf-8 -*-
# Description: haproxy netdata python.d module
# Author: Pawel Krupa (paulfantom)
from base import SocketService, UrlService
import csv
# default module values (can be overridden per job in `config`)
# update_every = 2
# chart priority on the netdata dashboard (lower = nearer the top)
priority = 60000
# number of consecutive failed updates before the job is disabled
retries = 60
# charts order (can be overridden if you want less charts, or different order)
ORDER = ['qcur', 'scur', 'bin', 'bout']
# Column index (kept as a string; converted with int() at use site) of each
# ORDER entry within a row of haproxy's "show stat" CSV output.
# Presumably qcur=2, scur=4, bin=8, bout=9 -- TODO confirm against the
# haproxy version in use.
POSITION = ['2', '4', '8', '9']
# Chart templates. Service._create_definitions() copies these per proxy:
# options[3]/options[4] are filled in with the proxy name and context, and
# the 'name' placeholder in each line is replaced per backend server.
CHARTS = {
    'qcur': {
        'options': ["", "Current queue", 'per sec', '', '', 'line'],
        'lines': [
            ['name', None, 'incremental']
        ]},
    'scur': {
        'options': ["", "Current session rate", 'per sec', '', '', 'line'],
        'lines': [
            ['name', None, 'incremental']
        ]},
    'bin': {
        'options': ["", "Bytes in", 'kilobytes/s', '', '', 'line'],
        'lines': [
            ['name', None, 'incremental', 1, 1024]
        ]},
    'bout': {
        'options': ["", "Bytes out", 'kilobytes/s', '', '', 'line'],
        'lines': [
            ['name', None, 'incremental', 1, 1024]
        ]},
}
class Service(SocketService, UrlService):
    """Collect haproxy statistics for netdata.

    If the job configuration contains a 'unix_socket' key, the stats are
    read over haproxy's admin socket (SocketService path, request
    "show stat"); otherwise they are fetched from the HTTP stats page as
    CSV (UrlService path).
    """

    def __init__(self, configuration=None, name=None):
        # Guard against configuration=None (the declared default) before
        # applying the `in` operator -- the original crashed with TypeError.
        self.use_socket = 'unix_socket' in (configuration or {})
        if self.use_socket:
            SocketService.__init__(self, configuration=configuration, name=name)
            self.request = "show stat\r\n"
        else:
            UrlService.__init__(self, configuration=configuration, name=name)
            # haproxy serves its statistics as CSV under this URL suffix
            if not self.url.endswith("/;csv;norefresh"):
                self.url += "/;csv;norefresh"
        # Populated later by _create_definitions() from a live data snapshot.
        self.order = []
        self.definitions = {}

    def _get_parsed_data(self):
        """
        Retrieve the raw CSV statistics and parse them.
        :return: list of rows (each a list of str), or None on any failure
        """
        try:
            if self.use_socket:
                raw = SocketService._get_raw_data(self)
            else:
                raw = UrlService._get_raw_data(self)
        except (ValueError, AttributeError):
            return None
        if raw is None:
            return None
        try:
            return list(csv.reader(raw.splitlines(), delimiter=','))
        except Exception as e:
            # best effort: log and report "no data" rather than crash
            self.debug(str(e))
            return None

    def _get_data(self):
        """
        Build the flat metric dict netdata expects.
        :return: dict mapping "<pxname>_<svname>_<metric>" -> int,
                 or None when nothing could be collected
        """
        parsed = self._get_parsed_data()
        if not parsed:
            return None
        data = {}
        for node in parsed[1:]:  # skip the CSV header row
            try:
                prefix = node[0] + "_" + node[1] + "_"
            except IndexError:
                continue
            for key, pos in zip(ORDER, POSITION):
                try:
                    data[prefix + key] = int(node[int(pos)])
                except (ValueError, IndexError):
                    # Empty/non-numeric cell, or a row shorter than the
                    # column index (IndexError was previously uncaught and
                    # aborted the whole update) -- skip just this metric.
                    pass
        if not data:
            return None
        return data

    def _check_raw_data(self, data):
        # FIXME: should detect whether the CSV payload is complete;
        # unconditionally reporting "complete" may truncate multi-chunk
        # socket responses.
        return True

    def _create_definitions(self):
        """
        Build self.order / self.definitions from a live stats snapshot:
        one chart per (proxy, metric) pair, one line per server row.
        :return: True on success, False when no data was available
        """
        try:
            data = self._get_parsed_data()[1:]
        except TypeError:
            # _get_parsed_data() returned None
            return False
        all_pxnames = []
        all_svnames = {}
        last_pxname = ""
        for node in data:
            try:
                pxname = node[0]
                svname = node[1]
            except IndexError:
                continue
            if pxname != last_pxname:
                # first row of a new proxy section
                all_pxnames.append(pxname)
                all_svnames[pxname] = [svname]
                for key in ORDER:
                    # order entry is "<pxname>_<metric>" (e.g. "www_qcur")
                    self.order.append(pxname + "_" + key)
            else:
                all_svnames[pxname].append(svname)
            last_pxname = pxname
        for pxname in all_pxnames:
            for name in ORDER:
                options = list(CHARTS[name]['options'])
                options[3] = pxname                 # chart family
                options[4] = pxname + "." + name    # chart context
                line_template = CHARTS[name]['lines'][0]
                lines = []
                for svname in all_svnames[pxname]:
                    tmp = list(line_template)
                    tmp[0] = "_".join([pxname, svname, name])
                    tmp[1] = svname
                    lines.append(tmp)
                self.definitions[pxname + "_" + name] = {'options': options, 'lines': lines}
        return True

    def check(self):
        """
        Run the parent transport check, then build chart definitions.
        :return: True when definitions could be created
        """
        # NOTE(review): the parent check's return value is deliberately
        # ignored here (it is run for its config-parsing side effects);
        # a transport failure surfaces below when _create_definitions
        # cannot fetch any data.
        if self.use_socket:
            SocketService.check(self)
        else:
            UrlService.check(self)
        try:
            return self._create_definitions()
        except Exception as e:
            self.debug(str(e))
            return False
# netdata python.d.plugin configuration for haproxy
#
# This file is in YAML format. Generally the format is:
#
# name: value
#
# There are 2 sections:
# - global variables
# - one or more JOBS
#
# JOBS allow you to collect values from multiple sources.
# Each source will have its own set of charts.
#
# JOB parameters have to be indented (using spaces only, example below).
# ----------------------------------------------------------------------
# Global Variables
# These variables set the defaults for all JOBs, however each JOB
# may define its own, overriding the defaults.
# update_every sets the default data collection frequency.
# If unset, the python.d.plugin default is used.
# update_every: 1
# priority controls the order of charts at the netdata dashboard.
# Lower numbers move the charts towards the top of the page.
# If unset, the default for python.d.plugin is used.
# priority: 60000
# retries sets the number of retries to be made in case of failures.
# If unset, the default for python.d.plugin is used.
# Attempts to restore the service are made once every update_every
# and only if the module has collected values in the past.
# retries: 5
localurl:
name: 'local'
url: 'http://localhost:7000/haproxy_stats/;csv;norefresh'
......@@ -16,3 +16,15 @@
args:
chdir: /opt/netdata
when: netdata_clone.changed
- name: "Copy additional files"
copy:
src: '{{ item.filename }}'
dest: '{{ item.path }}/{{ item.filename }}'
with_items:
- filename: 'haproxy.chart.py'
path: '/usr/libexec/netdata/python.d'
- filename: 'haproxy.conf'
path: '/etc/netdata/python.d'
notify:
- "Restart NetData"
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment