Commit 3a2c9d55, authored 8 years ago by jurgenhaas
Add HaProxy monitoring to NetData
parent 1fbfead0
Showing 3 changed files with 222 additions and 0 deletions:

  files/haproxy.chart.py   +172 −0
  files/haproxy.conf       +38  −0
  tasks/install.yml        +12  −0
files/haproxy.chart.py (new file, mode 100755; +172 −0)
# -*- coding: utf-8 -*-
# Description: haproxy netdata python.d module
# Author: Pawel Krupa (paulfantom)

from base import SocketService, UrlService
import csv

# default module values (can be overridden per job in `config`)
# update_every = 2
priority = 60000
retries = 60

# charts order (can be overridden if you want less charts, or different order)
ORDER = ['qcur', 'scur', 'bin', 'bout']
POSITION = ['2', '4', '8', '9']

CHARTS = {
    'qcur': {
        'options': ["", "Current queue", 'per sec', '', '', 'line'],
        'lines': [
            ['name', None, 'incremental']
        ]},
    'scur': {
        'options': ["", "Current session rate", 'per sec', '', '', 'line'],
        'lines': [
            ['name', None, 'incremental']
        ]},
    'bin': {
        'options': ["", "Bytes in", 'kilobytes/s', '', '', 'line'],
        'lines': [
            ['name', None, 'incremental', 1, 1024]
        ]},
    'bout': {
        'options': ["", "Bytes out", 'kilobytes/s', '', '', 'line'],
        'lines': [
            ['name', None, 'incremental', 1, 1024]
        ]},
}


class Service(SocketService, UrlService):
    def __init__(self, configuration=None, name=None):
        self.use_socket = 'unix_socket' in configuration
        if self.use_socket:
            SocketService.__init__(self, configuration=configuration, name=name)
            self.request = "show stat\r\n"
        else:
            UrlService.__init__(self, configuration=configuration, name=name)
            if not self.url.endswith("/;csv;norefresh"):
                self.url += "/;csv;norefresh"
        # self.order and self.definitions are created with _create_definitions method
        # self.order = ORDER
        # self.definitions = CHARTS
        self.order = []
        self.definitions = {}

    def _get_parsed_data(self):
        """
        Retrieve and parse raw data
        :return: dict
        """
        try:
            if self.use_socket:
                raw = SocketService._get_raw_data(self)
            else:
                raw = UrlService._get_raw_data(self)
        except (ValueError, AttributeError):
            return None
        if raw is None:
            return None
        try:
            # return [row for row in csv.reader(raw.splitlines(), delimiter=',')]
            return list(csv.reader(raw.splitlines(), delimiter=','))
        except Exception as e:
            self.debug(str(e))
            return None

    def _get_data(self):
        """
        Format data
        :return: dict
        """
        parsed = self._get_parsed_data()
        # if parsed is None or len(parsed) == 0:
        if not parsed:
            return None
        data = {}
        for node in parsed[1:]:
            try:
                prefix = node[0] + "_" + node[1] + "_"
            except IndexError:
                continue
            for i in range(len(ORDER)):
                try:
                    data[prefix + ORDER[i]] = int(node[int(POSITION[i])])
                except ValueError:
                    pass
        # if len(data) == 0:
        if not data:
            return None
        return data

    def _check_raw_data(self, data):
        # FIXME
        return True

    def _create_definitions(self):
        try:
            data = self._get_parsed_data()[1:]
        except TypeError:
            return False
        # create order
        all_pxnames = []
        all_svnames = {}
        last_pxname = ""
        for node in data:
            try:
                pxname = node[0]
                svname = node[1]
            except IndexError:
                continue
            if pxname != last_pxname:
                all_pxnames.append(pxname)
                all_svnames[pxname] = [svname]
                for key in ORDER:
                    # order entry consists of pxname, "_", and column name (like qcur)
                    self.order.append(pxname + "_" + key)
            else:
                all_svnames[pxname].append(svname)
            last_pxname = pxname
        # create definitions
        for pxname in all_pxnames:
            for name in ORDER:
                options = list(CHARTS[name]['options'])
                options[3] = pxname
                options[4] = pxname + "." + name
                line_template = CHARTS[name]['lines'][0]
                lines = []
                # omit_first = False
                # if len(all_svnames[pxname]) > 1:
                #     omit_first = True
                for svname in all_svnames[pxname]:
                    # if omit_first:
                    #     omit_first = False
                    #     continue
                    tmp = list(line_template)
                    # tmp[0] = pxname + "_" + svname + "_" + name
                    tmp[0] = "_".join([pxname, svname, name])
                    tmp[1] = svname
                    lines.append(tmp)
                self.definitions[pxname + "_" + name] = {'options': options, 'lines': lines}
        return True

    def check(self):
        if self.use_socket:
            SocketService.check(self)
        else:
            UrlService.check(self)
        try:
            return self._create_definitions()
        except Exception as e:
            self.debug(str(e))
            return False
files/haproxy.conf (new file, mode 100644; +38 −0)
# netdata python.d.plugin configuration for haproxy
#
# This file is in YaML format. Generally the format is:
#
# name: value
#
# There are 2 sections:
# - global variables
# - one or more JOBS
#
# JOBS allow you to collect values from multiple sources.
# Each source will have its own set of charts.
#
# JOB parameters have to be indented (using spaces only, example below).
# ----------------------------------------------------------------------
# Global Variables
# These variables set the defaults for all JOBs, however each JOB
# may define its own, overriding the defaults.
# update_every sets the default data collection frequency.
# If unset, the python.d.plugin default is used.
# update_every: 1
# priority controls the order of charts at the netdata dashboard.
# Lower numbers move the charts towards the top of the page.
# If unset, the default for python.d.plugin is used.
# priority: 60000
# retries sets the number of retries to be made in case of failures.
# If unset, the default for python.d.plugin is used.
# Attempts to restore the service are made once every update_every
# and only if the module has collected values in the past.
# retries: 5

localurl:
  name: 'local'
  url: 'http://localhost:7000/haproxy_stats/;csv;norefresh'
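The job above polls the HAProxy HTTP stats page. The chart module added in this commit switches to its SocketService code path when a job defines a unix_socket key instead of url. A minimal sketch of such a job, kept commented out because the job name and socket path are illustrative and not part of this commit (how SocketService consumes the path depends on the netdata python.d base in use):

# localsocket:
#   name: 'local'
#   unix_socket: '/var/run/haproxy.sock'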
tasks/install.yml (+12 −0)
@@ -16,3 +16,15 @@
   args:
     chdir: /opt/netdata
   when: netdata_clone.changed
+
+- name: "Copy additional files"
+  copy:
+    src: '{{ item.filename }}'
+    dest: '{{ item.path }}/{{ item.filename }}'
+  with_items:
+    - filename: 'haproxy.chart.py'
+      path: '/usr/libexec/netdata/python.d'
+    - filename: 'haproxy.conf'
+      path: '/etc/netdata/python.d'
+  notify:
+    - "Restart NetData"
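The notify above assumes a "Restart NetData" handler defined elsewhere in the role; that handler is not part of this diff. A minimal sketch of what it could look like, assuming netdata is managed as a system service named netdata:

# handlers/main.yml (illustrative sketch, not part of this commit)
- name: "Restart NetData"
  service:
    name: netdata
    state: restarted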