From 3a2c9d5558af14228a3cae93c69e9cadc8c5234f Mon Sep 17 00:00:00 2001
From: jurgenhaas <juergen@paragon-es.de>
Date: Sun, 13 Nov 2016 13:36:23 +0100
Subject: [PATCH] Add HAProxy monitoring to NetData

---
 files/haproxy.chart.py | 172 +++++++++++++++++++++++++++++++++++++++++
 files/haproxy.conf     |  38 +++++++++
 tasks/install.yml      |  12 +++
 3 files changed, 222 insertions(+)
 create mode 100755 files/haproxy.chart.py
 create mode 100644 files/haproxy.conf

diff --git a/files/haproxy.chart.py b/files/haproxy.chart.py
new file mode 100755
index 0000000..632e76b
--- /dev/null
+++ b/files/haproxy.chart.py
@@ -0,0 +1,172 @@
+# -*- coding: utf-8 -*-
+# Description: haproxy netdata python.d module
+# Author: Pawel Krupa (paulfantom)
+
+from base import SocketService, UrlService
+import csv
+
+# default module values (can be overridden per job in `config`)
+# update_every = 2
+priority = 60000
+retries = 60
+
+# charts order (can be overridden if you want fewer charts, or a different order)
+ORDER = ['qcur', 'scur', 'bin', 'bout']
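+# HAProxy 'show stat' CSV column index for each metric in ORDER (qcur=2, scur=4, bin=8, bout=9)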
+POSITION = [2, 4, 8, 9]
+
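+# each chart's 'options' list is [name, title, units, family, context, charttype];
+# family and context are filled in per proxy by Service._create_definitions()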
+CHARTS = {
+    'qcur': {
+        'options': ["", "Current queue", 'per sec', '', '', 'line'],
+        'lines': [
+            ['name', None, 'incremental']
+        ]},
+    'scur': {
+        'options': ["", "Current session rate", 'per sec', '', '', 'line'],
+        'lines': [
+            ['name', None, 'incremental']
+        ]},
+    'bin': {
+        'options': ["", "Bytes in", 'kilobytes/s', '', '', 'line'],
+        'lines': [
+            ['name', None, 'incremental', 1, 1024]
+        ]},
+    'bout': {
+        'options': ["", "Bytes out", 'kilobytes/s', '', '', 'line'],
+        'lines': [
+            ['name', None, 'incremental', 1, 1024]
+        ]},
+}
+
+
+class Service(SocketService, UrlService):
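+    """
+    Collects HAProxy statistics from the `show stat` CSV, either over the
+    stats UNIX socket (SocketService) or from the HTTP stats URL (UrlService).
+    """
+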
+    def __init__(self, configuration=None, name=None):
+        self.use_socket = 'unix_socket' in (configuration or {})
+        if self.use_socket:
+            SocketService.__init__(self, configuration=configuration, name=name)
+            self.request = "show stat\r\n"
+        else:
+            UrlService.__init__(self, configuration=configuration, name=name)
+            if not self.url.endswith("/;csv;norefresh"):
+                self.url += "/;csv;norefresh"
+
+        # self.order and self.definitions are populated later by _create_definitions()
+        self.order = []
+        self.definitions = {}
+
+    def _get_parsed_data(self):
+        """
+        Retrieve the raw CSV data and split it into rows
+        :return: list
+        """
+        try:
+            if self.use_socket:
+                raw = SocketService._get_raw_data(self)
+            else:
+                raw = UrlService._get_raw_data(self)
+        except (ValueError, AttributeError):
+            return None
+
+        if raw is None:
+            return None
+
+        try:
+            return list(csv.reader(raw.splitlines(), delimiter=','))
+        except Exception as e:
+            self.debug(str(e))
+            return None
+
+    def _get_data(self):
+        """
+        Format data
+        :return: dict
+        """
+        parsed = self._get_parsed_data()
+        if not parsed:
+            return None
+
+        data = {}
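+        # parsed[0] is the CSV header row ("# pxname,svname,..."), so skip it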
+        for node in parsed[1:]:
+            try:
+                prefix = node[0] + "_" + node[1] + "_"
+            except IndexError:
+                continue
+            for key, pos in zip(ORDER, POSITION):
+                try:
+                    data[prefix + key] = int(node[pos])
+                except (IndexError, ValueError):
+                    continue
+
+        if not data:
+            return None
+        return data
+
+    def _check_raw_data(self, data):
+        # FIXME: should return True only once the complete CSV response has been
+        # received; always returning True assumes a single read delivers it whole.
+        return True
+
+    def _create_definitions(self):
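+        """
+        Build self.order and self.definitions from the parsed CSV: one chart
+        per (proxy, metric) pair, with one dimension per svname row
+        (FRONTEND, BACKEND or individual server).
+        :return: boolean
+        """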
+        try:
+            data = self._get_parsed_data()[1:]
+        except TypeError:
+            return False
+
+        # create order
+        all_pxnames = []
+        all_svnames = {}
+        last_pxname = ""
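+        # rows that share a pxname are assumed to be consecutive in the CSV, so
+        # comparing against the previous pxname is enough to group servers per proxy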
+        for node in data:
+            try:
+                pxname = node[0]
+                svname = node[1]
+            except IndexError:
+                continue
+            if pxname != last_pxname:
+                all_pxnames.append(pxname)
+                all_svnames[pxname] = [svname]
+                for key in ORDER:
+                    # order entry consists of pxname, "_", and column name (like qcur)
+                    self.order.append(pxname + "_" + key)
+            else:
+                all_svnames[pxname].append(svname)
+            last_pxname = pxname
+
+        # create definitions
+        for pxname in all_pxnames:
+            for name in ORDER:
+                options = list(CHARTS[name]['options'])
+                options[3] = pxname
+                options[4] = pxname + "." + name
+                line_template = CHARTS[name]['lines'][0]
+                lines = []
+                for svname in all_svnames[pxname]:
+                    tmp = list(line_template)
+                    tmp[0] = "_".join([pxname, svname, name])
+                    tmp[1] = svname
+                    lines.append(tmp)
+                self.definitions[pxname + "_" + name] = {'options': options, 'lines': lines}
+
+        return True
+
+    def check(self):
+        if self.use_socket:
+            parent_ok = SocketService.check(self)
+        else:
+            parent_ok = UrlService.check(self)
+        if not parent_ok:
+            return False
+
+        try:
+            return self._create_definitions()
+        except Exception as e:
+            self.debug(str(e))
+            return False
diff --git a/files/haproxy.conf b/files/haproxy.conf
new file mode 100644
index 0000000..29550b5
--- /dev/null
+++ b/files/haproxy.conf
@@ -0,0 +1,38 @@
+# netdata python.d.plugin configuration for haproxy
+#
+# This file is in YAML format. Generally the format is:
+#
+# name: value
+#
+# There are 2 sections:
+#  - global variables
+#  - one or more JOBS
+#
+# JOBS allow you to collect values from multiple sources.
+# Each source will have its own set of charts.
+#
+# JOB parameters have to be indented (using spaces only, example below).
+
+# ----------------------------------------------------------------------
+# Global Variables
+# These variables set the defaults for all JOBs, however each JOB
+# may define its own, overriding the defaults.
+
+# update_every sets the default data collection frequency.
+# If unset, the python.d.plugin default is used.
+# update_every: 1
+
+# priority controls the order of charts at the netdata dashboard.
+# Lower numbers move the charts towards the top of the page.
+# If unset, the default for python.d.plugin is used.
+# priority: 60000
+
+# retries sets the number of retries to be made in case of failures.
+# If unset, the default for python.d.plugin is used.
+# Attempts to restore the service are made once every update_every
+# and only if the module has collected values in the past.
+# retries: 5
+
+localurl:
+  name: 'local'
+  url: 'http://localhost:7000/haproxy_stats/;csv;norefresh'
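+
+# A socket-based job might look like the following (example only: the module
+# switches to socket mode when a job defines `unix_socket`, and the path must
+# match the `stats socket` directive in your haproxy configuration):
+#
+# localsocket:
+#   name: 'local_socket'
+#   unix_socket: '/var/run/haproxy/admin.sock'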
diff --git a/tasks/install.yml b/tasks/install.yml
index d6b9f30..ff329eb 100644
--- a/tasks/install.yml
+++ b/tasks/install.yml
@@ -16,3 +16,15 @@
   args:
     chdir: /opt/netdata
   when: netdata_clone.changed
+
+- name: "Copy additional files"
+  copy:
+    src: '{{ item.filename }}'
+    dest: '{{ item.path }}/{{ item.filename }}'
+  with_items:
+    - filename: 'haproxy.chart.py'
+      path: '/usr/libexec/netdata/python.d'
+    - filename: 'haproxy.conf'
+      path: '/etc/netdata/python.d'
+  notify:
+    - "Restart NetData"
-- 
GitLab