author     Federico Ceratto <federico.ceratto@gmail.com>   2017-12-19 23:39:27 +0000
committer  Federico Ceratto <federico.ceratto@gmail.com>   2017-12-19 23:39:27 +0000
commit     6abdfdead1326ccca98dc4cf415c216f1bf25400 (patch)
tree       70b803bd499fd45e89627c1b45b90ddf20e8e959 /python.d
parent     Release v. 1.8.0+dfsg-1 to Unstable (diff)
parent     New upstream version 1.9.0+dfsg (diff)
download   netdata-6abdfdead1326ccca98dc4cf415c216f1bf25400.tar.xz
           netdata-6abdfdead1326ccca98dc4cf415c216f1bf25400.zip
Update upstream source from tag 'upstream/1.9.0+dfsg'
Update to upstream version '1.9.0+dfsg' with Debian dir 28b8242a05f9ad26cd1cdbcf078be754fc7d6251
Diffstat (limited to 'python.d')
-rw-r--r--  python.d/Makefile.am | 34
-rw-r--r--  python.d/Makefile.in | 154
-rw-r--r--  python.d/README.md | 353
-rw-r--r--  python.d/apache.chart.py | 37
-rw-r--r--  python.d/apache_cache.chart.py | 60
-rw-r--r--  python.d/beanstalk.chart.py | 250
-rw-r--r--  python.d/bind_rndc.chart.py | 26
-rw-r--r--  python.d/chrony.chart.py | 4
-rw-r--r--  python.d/couchdb.chart.py | 410
-rw-r--r--  python.d/cpufreq.chart.py | 23
-rw-r--r--  python.d/cpuidle.chart.py | 27
-rw-r--r--  python.d/dns_query_time.chart.py | 15
-rw-r--r--  python.d/dnsdist.chart.py | 101
-rw-r--r--  python.d/dovecot.chart.py | 19
-rw-r--r--  python.d/elasticsearch.chart.py | 3
-rw-r--r--  python.d/example.chart.py | 51
-rw-r--r--  python.d/exim.chart.py | 2
-rw-r--r--  python.d/fail2ban.chart.py | 13
-rw-r--r--  python.d/freeradius.chart.py | 101
-rw-r--r--  python.d/go_expvar.chart.py | 31
-rw-r--r--  python.d/haproxy.chart.py | 4
-rw-r--r--  python.d/hddtemp.chart.py | 66
-rw-r--r--  python.d/ipfs.chart.py | 85
-rw-r--r--  python.d/isc_dhcpd.chart.py | 22
-rw-r--r--  python.d/mdstat.chart.py | 7
-rw-r--r--  python.d/memcached.chart.py | 11
-rw-r--r--  python.d/mongodb.chart.py | 4
-rw-r--r--  python.d/mysql.chart.py | 84
-rw-r--r--  python.d/nginx.chart.py | 14
-rw-r--r--  python.d/nsd.chart.py | 4
-rw-r--r--  python.d/ovpn_status_log.chart.py | 41
-rw-r--r--  python.d/phpfpm.chart.py | 9
-rw-r--r--  python.d/postfix.chart.py | 2
-rw-r--r--  python.d/postgres.chart.py | 33
-rw-r--r--  python.d/powerdns.chart.py | 58
-rw-r--r--  python.d/python_modules/base.py | 1126
-rw-r--r--  python.d/python_modules/bases/FrameworkServices/ExecutableService.py | 85
-rw-r--r--  python.d/python_modules/bases/FrameworkServices/LogService.py | 78
-rw-r--r--  python.d/python_modules/bases/FrameworkServices/MySQLService.py | 158
-rw-r--r--  python.d/python_modules/bases/FrameworkServices/SimpleService.py | 252
-rw-r--r--  python.d/python_modules/bases/FrameworkServices/SocketService.py | 250
-rw-r--r--  python.d/python_modules/bases/FrameworkServices/UrlService.py | 115
-rw-r--r--  python.d/python_modules/bases/FrameworkServices/__init__.py | 0
-rw-r--r--  python.d/python_modules/bases/__init__.py | 0
-rw-r--r--  python.d/python_modules/bases/charts.py | 376
-rw-r--r--  python.d/python_modules/bases/collection.py | 144
-rw-r--r--  python.d/python_modules/bases/loaders.py | 66
-rw-r--r--  python.d/python_modules/bases/loggers.py | 205
-rw-r--r--  python.d/python_modules/msg.py | 101
-rw-r--r--  python.d/python_modules/third_party/__init__.py | 0
-rw-r--r--  python.d/python_modules/third_party/lm_sensors.py (renamed from python.d/python_modules/lm_sensors.py) | 0
-rw-r--r--  python.d/python_modules/third_party/ordereddict.py | 128
-rw-r--r--  python.d/rabbitmq.chart.py | 2
-rw-r--r--  python.d/redis.chart.py | 33
-rw-r--r--  python.d/retroshare.chart.py | 10
-rw-r--r--  python.d/samba.chart.py | 8
-rw-r--r--  python.d/sensors.chart.py | 68
-rw-r--r--  python.d/smartd_log.chart.py | 508
-rw-r--r--  python.d/squid.chart.py | 11
-rw-r--r--  python.d/tomcat.chart.py | 15
-rw-r--r--  python.d/varnish.chart.py | 406
-rw-r--r--  python.d/web_log.chart.py | 344
62 files changed, 4285 insertions, 2362 deletions
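
Most of the churn in this release is the framework restructure visible in the diffstat: the monolithic `python_modules/base.py` is split into one module per service class under `python_modules/bases/FrameworkServices/`, shared helpers move to `python_modules/bases/`, and vendored code moves to `python_modules/third_party/`. A minimal sketch of what this means for a chart module (the import path is taken from the per-file diffs below; everything else is illustrative):

```python
# Old layout (pre-1.9.0): every framework class lived in base.py:
#     from base import UrlService
# New layout: one module per framework class.
from bases.FrameworkServices.UrlService import UrlService


class Service(UrlService):
    def __init__(self, configuration=None, name=None):
        UrlService.__init__(self, configuration=configuration, name=name)
        self.url = 'http://localhost/server-status?auto'
```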
diff --git a/python.d/Makefile.am b/python.d/Makefile.am
index 84c2aead..1c84cddb 100644
--- a/python.d/Makefile.am
+++ b/python.d/Makefile.am
@@ -14,12 +14,14 @@ dist_python_SCRIPTS = \
dist_python_DATA = \
README.md \
apache.chart.py \
- apache_cache.chart.py \
+ beanstalk.chart.py \
bind_rndc.chart.py \
chrony.chart.py \
+ couchdb.chart.py \
cpufreq.chart.py \
cpuidle.chart.py \
dns_query_time.chart.py \
+ dnsdist.chart.py \
dovecot.chart.py \
elasticsearch.chart.py \
example.chart.py \
@@ -41,6 +43,7 @@ dist_python_DATA = \
phpfpm.chart.py \
postfix.chart.py \
postgres.chart.py \
+ powerdns.chart.py \
rabbitmq.chart.py \
redis.chart.py \
retroshare.chart.py \
@@ -57,8 +60,33 @@ pythonmodulesdir=$(pythondir)/python_modules
dist_pythonmodules_DATA = \
python_modules/__init__.py \
python_modules/base.py \
- python_modules/msg.py \
- python_modules/lm_sensors.py \
+ $(NULL)
+
+basesdir=$(pythonmodulesdir)/bases
+dist_bases_DATA = \
+ python_modules/bases/__init__.py \
+ python_modules/bases/charts.py \
+ python_modules/bases/collection.py \
+ python_modules/bases/loaders.py \
+ python_modules/bases/loggers.py \
+ $(NULL)
+
+bases_framework_servicesdir=$(basesdir)/FrameworkServices
+dist_bases_framework_services_DATA = \
+ python_modules/bases/FrameworkServices/__init__.py \
+ python_modules/bases/FrameworkServices/ExecutableService.py \
+ python_modules/bases/FrameworkServices/LogService.py \
+ python_modules/bases/FrameworkServices/MySQLService.py \
+ python_modules/bases/FrameworkServices/SimpleService.py \
+ python_modules/bases/FrameworkServices/SocketService.py \
+ python_modules/bases/FrameworkServices/UrlService.py \
+ $(NULL)
+
+third_partydir=$(pythonmodulesdir)/third_party
+dist_third_party_DATA = \
+ python_modules/third_party/__init__.py \
+ python_modules/third_party/ordereddict.py \
+ python_modules/third_party/lm_sensors.py \
$(NULL)
pythonyaml2dir=$(pythonmodulesdir)/pyyaml2
diff --git a/python.d/Makefile.in b/python.d/Makefile.in
index 104f4f1c..dda54e1a 100644
--- a/python.d/Makefile.in
+++ b/python.d/Makefile.in
@@ -81,6 +81,7 @@ build_triplet = @build@
host_triplet = @host@
DIST_COMMON = $(top_srcdir)/build/subst.inc $(srcdir)/Makefile.in \
$(srcdir)/Makefile.am $(dist_python_SCRIPTS) \
+ $(dist_bases_DATA) $(dist_bases_framework_services_DATA) \
$(dist_python_DATA) $(dist_python_urllib3_DATA) \
$(dist_python_urllib3_backports_DATA) \
$(dist_python_urllib3_contrib_DATA) \
@@ -88,7 +89,8 @@ DIST_COMMON = $(top_srcdir)/build/subst.inc $(srcdir)/Makefile.in \
$(dist_python_urllib3_securetransport_DATA) \
$(dist_python_urllib3_ssl_match_hostname_DATA) \
$(dist_python_urllib3_util_DATA) $(dist_pythonmodules_DATA) \
- $(dist_pythonyaml2_DATA) $(dist_pythonyaml3_DATA)
+ $(dist_pythonyaml2_DATA) $(dist_pythonyaml3_DATA) \
+ $(dist_third_party_DATA)
subdir = python.d
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/ax_c___atomic.m4 \
@@ -132,8 +134,9 @@ am__uninstall_files_from_dir = { \
|| { echo " ( cd '$$dir' && rm -f" $$files ")"; \
$(am__cd) "$$dir" && rm -f $$files; }; \
}
-am__installdirs = "$(DESTDIR)$(pythondir)" "$(DESTDIR)$(pythondir)" \
- "$(DESTDIR)$(python_urllib3dir)" \
+am__installdirs = "$(DESTDIR)$(pythondir)" "$(DESTDIR)$(basesdir)" \
+ "$(DESTDIR)$(bases_framework_servicesdir)" \
+ "$(DESTDIR)$(pythondir)" "$(DESTDIR)$(python_urllib3dir)" \
"$(DESTDIR)$(python_urllib3_backportsdir)" \
"$(DESTDIR)$(python_urllib3_contribdir)" \
"$(DESTDIR)$(python_urllib3_packagesdir)" \
@@ -141,7 +144,7 @@ am__installdirs = "$(DESTDIR)$(pythondir)" "$(DESTDIR)$(pythondir)" \
"$(DESTDIR)$(python_urllib3_ssl_match_hostnamedir)" \
"$(DESTDIR)$(python_urllib3_utildir)" \
"$(DESTDIR)$(pythonmodulesdir)" "$(DESTDIR)$(pythonyaml2dir)" \
- "$(DESTDIR)$(pythonyaml3dir)"
+ "$(DESTDIR)$(pythonyaml3dir)" "$(DESTDIR)$(third_partydir)"
SCRIPTS = $(dist_python_SCRIPTS)
AM_V_P = $(am__v_P_@AM_V@)
am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
@@ -162,14 +165,16 @@ am__can_run_installinfo = \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
-DATA = $(dist_python_DATA) $(dist_python_urllib3_DATA) \
+DATA = $(dist_bases_DATA) $(dist_bases_framework_services_DATA) \
+ $(dist_python_DATA) $(dist_python_urllib3_DATA) \
$(dist_python_urllib3_backports_DATA) \
$(dist_python_urllib3_contrib_DATA) \
$(dist_python_urllib3_packages_DATA) \
$(dist_python_urllib3_securetransport_DATA) \
$(dist_python_urllib3_ssl_match_hostname_DATA) \
$(dist_python_urllib3_util_DATA) $(dist_pythonmodules_DATA) \
- $(dist_pythonyaml2_DATA) $(dist_pythonyaml3_DATA)
+ $(dist_pythonyaml2_DATA) $(dist_pythonyaml3_DATA) \
+ $(dist_third_party_DATA)
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = @ACLOCAL@
@@ -329,12 +334,14 @@ dist_python_SCRIPTS = \
dist_python_DATA = \
README.md \
apache.chart.py \
- apache_cache.chart.py \
+ beanstalk.chart.py \
bind_rndc.chart.py \
chrony.chart.py \
+ couchdb.chart.py \
cpufreq.chart.py \
cpuidle.chart.py \
dns_query_time.chart.py \
+ dnsdist.chart.py \
dovecot.chart.py \
elasticsearch.chart.py \
example.chart.py \
@@ -356,6 +363,7 @@ dist_python_DATA = \
phpfpm.chart.py \
postfix.chart.py \
postgres.chart.py \
+ powerdns.chart.py \
rabbitmq.chart.py \
redis.chart.py \
retroshare.chart.py \
@@ -372,8 +380,33 @@ pythonmodulesdir = $(pythondir)/python_modules
dist_pythonmodules_DATA = \
python_modules/__init__.py \
python_modules/base.py \
- python_modules/msg.py \
- python_modules/lm_sensors.py \
+ $(NULL)
+
+basesdir = $(pythonmodulesdir)/bases
+dist_bases_DATA = \
+ python_modules/bases/__init__.py \
+ python_modules/bases/charts.py \
+ python_modules/bases/collection.py \
+ python_modules/bases/loaders.py \
+ python_modules/bases/loggers.py \
+ $(NULL)
+
+bases_framework_servicesdir = $(basesdir)/FrameworkServices
+dist_bases_framework_services_DATA = \
+ python_modules/bases/FrameworkServices/__init__.py \
+ python_modules/bases/FrameworkServices/ExecutableService.py \
+ python_modules/bases/FrameworkServices/LogService.py \
+ python_modules/bases/FrameworkServices/MySQLService.py \
+ python_modules/bases/FrameworkServices/SimpleService.py \
+ python_modules/bases/FrameworkServices/SocketService.py \
+ python_modules/bases/FrameworkServices/UrlService.py \
+ $(NULL)
+
+third_partydir = $(pythonmodulesdir)/third_party
+dist_third_party_DATA = \
+ python_modules/third_party/__init__.py \
+ python_modules/third_party/ordereddict.py \
+ python_modules/third_party/lm_sensors.py \
$(NULL)
pythonyaml2dir = $(pythonmodulesdir)/pyyaml2
@@ -552,6 +585,48 @@ uninstall-dist_pythonSCRIPTS:
files=`for p in $$list; do echo "$$p"; done | \
sed -e 's,.*/,,;$(transform)'`; \
dir='$(DESTDIR)$(pythondir)'; $(am__uninstall_files_from_dir)
+install-dist_basesDATA: $(dist_bases_DATA)
+ @$(NORMAL_INSTALL)
+ @list='$(dist_bases_DATA)'; test -n "$(basesdir)" || list=; \
+ if test -n "$$list"; then \
+ echo " $(MKDIR_P) '$(DESTDIR)$(basesdir)'"; \
+ $(MKDIR_P) "$(DESTDIR)$(basesdir)" || exit 1; \
+ fi; \
+ for p in $$list; do \
+ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+ echo "$$d$$p"; \
+ done | $(am__base_list) | \
+ while read files; do \
+ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(basesdir)'"; \
+ $(INSTALL_DATA) $$files "$(DESTDIR)$(basesdir)" || exit $$?; \
+ done
+
+uninstall-dist_basesDATA:
+ @$(NORMAL_UNINSTALL)
+ @list='$(dist_bases_DATA)'; test -n "$(basesdir)" || list=; \
+ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
+ dir='$(DESTDIR)$(basesdir)'; $(am__uninstall_files_from_dir)
+install-dist_bases_framework_servicesDATA: $(dist_bases_framework_services_DATA)
+ @$(NORMAL_INSTALL)
+ @list='$(dist_bases_framework_services_DATA)'; test -n "$(bases_framework_servicesdir)" || list=; \
+ if test -n "$$list"; then \
+ echo " $(MKDIR_P) '$(DESTDIR)$(bases_framework_servicesdir)'"; \
+ $(MKDIR_P) "$(DESTDIR)$(bases_framework_servicesdir)" || exit 1; \
+ fi; \
+ for p in $$list; do \
+ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+ echo "$$d$$p"; \
+ done | $(am__base_list) | \
+ while read files; do \
+ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(bases_framework_servicesdir)'"; \
+ $(INSTALL_DATA) $$files "$(DESTDIR)$(bases_framework_servicesdir)" || exit $$?; \
+ done
+
+uninstall-dist_bases_framework_servicesDATA:
+ @$(NORMAL_UNINSTALL)
+ @list='$(dist_bases_framework_services_DATA)'; test -n "$(bases_framework_servicesdir)" || list=; \
+ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
+ dir='$(DESTDIR)$(bases_framework_servicesdir)'; $(am__uninstall_files_from_dir)
install-dist_pythonDATA: $(dist_python_DATA)
@$(NORMAL_INSTALL)
@list='$(dist_python_DATA)'; test -n "$(pythondir)" || list=; \
@@ -783,6 +858,27 @@ uninstall-dist_pythonyaml3DATA:
@list='$(dist_pythonyaml3_DATA)'; test -n "$(pythonyaml3dir)" || list=; \
files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
dir='$(DESTDIR)$(pythonyaml3dir)'; $(am__uninstall_files_from_dir)
+install-dist_third_partyDATA: $(dist_third_party_DATA)
+ @$(NORMAL_INSTALL)
+ @list='$(dist_third_party_DATA)'; test -n "$(third_partydir)" || list=; \
+ if test -n "$$list"; then \
+ echo " $(MKDIR_P) '$(DESTDIR)$(third_partydir)'"; \
+ $(MKDIR_P) "$(DESTDIR)$(third_partydir)" || exit 1; \
+ fi; \
+ for p in $$list; do \
+ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+ echo "$$d$$p"; \
+ done | $(am__base_list) | \
+ while read files; do \
+ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(third_partydir)'"; \
+ $(INSTALL_DATA) $$files "$(DESTDIR)$(third_partydir)" || exit $$?; \
+ done
+
+uninstall-dist_third_partyDATA:
+ @$(NORMAL_UNINSTALL)
+ @list='$(dist_third_party_DATA)'; test -n "$(third_partydir)" || list=; \
+ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
+ dir='$(DESTDIR)$(third_partydir)'; $(am__uninstall_files_from_dir)
tags TAGS:
ctags CTAGS:
@@ -824,7 +920,7 @@ check-am: all-am
check: check-am
all-am: Makefile $(SCRIPTS) $(DATA)
installdirs:
- for dir in "$(DESTDIR)$(pythondir)" "$(DESTDIR)$(pythondir)" "$(DESTDIR)$(python_urllib3dir)" "$(DESTDIR)$(python_urllib3_backportsdir)" "$(DESTDIR)$(python_urllib3_contribdir)" "$(DESTDIR)$(python_urllib3_packagesdir)" "$(DESTDIR)$(python_urllib3_securetransportdir)" "$(DESTDIR)$(python_urllib3_ssl_match_hostnamedir)" "$(DESTDIR)$(python_urllib3_utildir)" "$(DESTDIR)$(pythonmodulesdir)" "$(DESTDIR)$(pythonyaml2dir)" "$(DESTDIR)$(pythonyaml3dir)"; do \
+ for dir in "$(DESTDIR)$(pythondir)" "$(DESTDIR)$(basesdir)" "$(DESTDIR)$(bases_framework_servicesdir)" "$(DESTDIR)$(pythondir)" "$(DESTDIR)$(python_urllib3dir)" "$(DESTDIR)$(python_urllib3_backportsdir)" "$(DESTDIR)$(python_urllib3_contribdir)" "$(DESTDIR)$(python_urllib3_packagesdir)" "$(DESTDIR)$(python_urllib3_securetransportdir)" "$(DESTDIR)$(python_urllib3_ssl_match_hostnamedir)" "$(DESTDIR)$(python_urllib3_utildir)" "$(DESTDIR)$(pythonmodulesdir)" "$(DESTDIR)$(pythonyaml2dir)" "$(DESTDIR)$(pythonyaml3dir)" "$(DESTDIR)$(third_partydir)"; do \
test -z "$$dir" || $(MKDIR_P) "$$dir"; \
done
install: install-am
@@ -879,7 +975,9 @@ info: info-am
info-am:
-install-data-am: install-dist_pythonDATA install-dist_pythonSCRIPTS \
+install-data-am: install-dist_basesDATA \
+ install-dist_bases_framework_servicesDATA \
+ install-dist_pythonDATA install-dist_pythonSCRIPTS \
install-dist_python_urllib3DATA \
install-dist_python_urllib3_backportsDATA \
install-dist_python_urllib3_contribDATA \
@@ -888,7 +986,7 @@ install-data-am: install-dist_pythonDATA install-dist_pythonSCRIPTS \
install-dist_python_urllib3_ssl_match_hostnameDATA \
install-dist_python_urllib3_utilDATA \
install-dist_pythonmodulesDATA install-dist_pythonyaml2DATA \
- install-dist_pythonyaml3DATA
+ install-dist_pythonyaml3DATA install-dist_third_partyDATA
install-dvi: install-dvi-am
@@ -932,7 +1030,9 @@ ps: ps-am
ps-am:
-uninstall-am: uninstall-dist_pythonDATA uninstall-dist_pythonSCRIPTS \
+uninstall-am: uninstall-dist_basesDATA \
+ uninstall-dist_bases_framework_servicesDATA \
+ uninstall-dist_pythonDATA uninstall-dist_pythonSCRIPTS \
uninstall-dist_python_urllib3DATA \
uninstall-dist_python_urllib3_backportsDATA \
uninstall-dist_python_urllib3_contribDATA \
@@ -941,15 +1041,18 @@ uninstall-am: uninstall-dist_pythonDATA uninstall-dist_pythonSCRIPTS \
uninstall-dist_python_urllib3_ssl_match_hostnameDATA \
uninstall-dist_python_urllib3_utilDATA \
uninstall-dist_pythonmodulesDATA \
- uninstall-dist_pythonyaml2DATA uninstall-dist_pythonyaml3DATA
+ uninstall-dist_pythonyaml2DATA uninstall-dist_pythonyaml3DATA \
+ uninstall-dist_third_partyDATA
.MAKE: install-am install-strip
.PHONY: all all-am check check-am clean clean-generic cscopelist-am \
ctags-am distclean distclean-generic distdir dvi dvi-am html \
html-am info info-am install install-am install-data \
- install-data-am install-dist_pythonDATA \
- install-dist_pythonSCRIPTS install-dist_python_urllib3DATA \
+ install-data-am install-dist_basesDATA \
+ install-dist_bases_framework_servicesDATA \
+ install-dist_pythonDATA install-dist_pythonSCRIPTS \
+ install-dist_python_urllib3DATA \
install-dist_python_urllib3_backportsDATA \
install-dist_python_urllib3_contribDATA \
install-dist_python_urllib3_packagesDATA \
@@ -957,13 +1060,15 @@ uninstall-am: uninstall-dist_pythonDATA uninstall-dist_pythonSCRIPTS \
install-dist_python_urllib3_ssl_match_hostnameDATA \
install-dist_python_urllib3_utilDATA \
install-dist_pythonmodulesDATA install-dist_pythonyaml2DATA \
- install-dist_pythonyaml3DATA install-dvi install-dvi-am \
- install-exec install-exec-am install-html install-html-am \
- install-info install-info-am install-man install-pdf \
- install-pdf-am install-ps install-ps-am install-strip \
- installcheck installcheck-am installdirs maintainer-clean \
- maintainer-clean-generic mostlyclean mostlyclean-generic pdf \
- pdf-am ps ps-am tags-am uninstall uninstall-am \
+ install-dist_pythonyaml3DATA install-dist_third_partyDATA \
+ install-dvi install-dvi-am install-exec install-exec-am \
+ install-html install-html-am install-info install-info-am \
+ install-man install-pdf install-pdf-am install-ps \
+ install-ps-am install-strip installcheck installcheck-am \
+ installdirs maintainer-clean maintainer-clean-generic \
+ mostlyclean mostlyclean-generic pdf pdf-am ps ps-am tags-am \
+ uninstall uninstall-am uninstall-dist_basesDATA \
+ uninstall-dist_bases_framework_servicesDATA \
uninstall-dist_pythonDATA uninstall-dist_pythonSCRIPTS \
uninstall-dist_python_urllib3DATA \
uninstall-dist_python_urllib3_backportsDATA \
@@ -973,7 +1078,8 @@ uninstall-am: uninstall-dist_pythonDATA uninstall-dist_pythonSCRIPTS \
uninstall-dist_python_urllib3_ssl_match_hostnameDATA \
uninstall-dist_python_urllib3_utilDATA \
uninstall-dist_pythonmodulesDATA \
- uninstall-dist_pythonyaml2DATA uninstall-dist_pythonyaml3DATA
+ uninstall-dist_pythonyaml2DATA uninstall-dist_pythonyaml3DATA \
+ uninstall-dist_third_partyDATA
.in:
if sed \
diff --git a/python.d/README.md b/python.d/README.md
index 1b04ccdf..009265f7 100644
--- a/python.d/README.md
+++ b/python.d/README.md
@@ -125,13 +125,118 @@ If no configuration is given, module will attempt to read log file at `/var/log/
---
+# beanstalk
+
+This module provides server- and tube-level statistics:
+
+**Requirements:**
+ * `python-beanstalkc`
+ * `python-yaml`
+
+**Server statistics:**
+
+1. **Cpu usage** in cpu time
+ * user
+ * system
+
+2. **Jobs rate** in jobs/s
+ * total
+ * timeouts
+
+3. **Connections rate** in connections/s
+ * connections
+
+4. **Commands rate** in commands/s
+ * put
+ * peek
+ * peek-ready
+ * peek-delayed
+ * peek-buried
+ * reserve
+ * use
+ * watch
+ * ignore
+ * delete
+ * release
+ * bury
+ * kick
+ * stats
+ * stats-job
+ * stats-tube
+ * list-tubes
+ * list-tube-used
+ * list-tubes-watched
+ * pause-tube
+
+5. **Current tubes** in tubes
+ * tubes
+
+6. **Current jobs** in jobs
+ * urgent
+ * ready
+ * reserved
+ * delayed
+ * buried
+
+7. **Current connections** in connections
+ * written
+ * producers
+ * workers
+ * waiting
+
+8. **Binlog** in records/s
+ * written
+ * migrated
+
+9. **Uptime** in seconds
+ * uptime
+
+**Per tube statistics:**
+
+1. **Jobs rate** in jobs/s
+ * jobs
+
+2. **Jobs** in jobs
+ * using
+ * ready
+ * reserved
+ * delayed
+ * buried
+
+3. **Connections** in connections
+ * using
+ * waiting
+ * watching
+
+4. **Commands** in commands/s
+ * deletes
+ * pauses
+
+5. **Pause** in seconds
+ * since
+ * left
+
+
+### configuration
+
+Sample:
+
+```yaml
+host : '127.0.0.1'
+port : 11300
+```
+
+If no configuration is given, the module will attempt to connect to beanstalkd at `127.0.0.1:11300`.
+
+---
+
# bind_rndc
Module parses bind dump file to collect real-time performance metrics
**Requirements:**
* Version of bind must be 9.6 +
- * Netdata must have permissions to run `rndc status`
+ * Netdata must have permissions to run `rndc stats`
It produces:
@@ -218,6 +323,42 @@ local:
---
+# couchdb
+
+This module monitors vital statistics of a local Apache CouchDB 2.x server, including:
+
+* Overall server reads/writes
+* HTTP traffic breakdown
+ * Request methods (`GET`, `PUT`, `POST`, etc.)
+ * Response status codes (`200`, `201`, `4xx`, etc.)
+* Active server tasks
+* Replication status (CouchDB 2.1 and up only)
+* Erlang VM stats
+* Optional per-database statistics: sizes, # of docs, # of deleted docs
+
+### Configuration
+
+Sample for a local server running on port 5984:
+```yaml
+local:
+ user: 'admin'
+ pass: 'password'
+ node: 'couchdb@127.0.0.1'
+```
+
+Be sure to specify a correct admin-level username and password.
+
+You may also need to change the `node` name; this should match the value of `-name NODENAME` in your CouchDB's `etc/vm.args` file. Typically this is of the form `couchdb@fully.qualified.domain.name` in a cluster, or `couchdb@127.0.0.1` / `couchdb@localhost` for a single-node server.
+
+If you want per-database statistics, these need to be added to the configuration, separated by spaces:
+```yaml
+local:
+ ...
+ databases: 'db1 db2 db3 ...'
+```
+
+---
+
# cpufreq
This module shows the current CPU frequency as set by the cpufreq kernel
@@ -271,6 +412,59 @@ It produces one aggregate chart or one chart per dns server, showing the query t
---
+# dnsdist
+
+This module monitors dnsdist performance and health metrics.
+
+The following charts are drawn:
+
+1. **Response latency**
+ * latency-slow
+ * latency100-1000
+ * latency50-100
+ * latency10-50
+ * latency1-10
+ * latency0-1
+
+2. **Cache performance**
+ * cache-hits
+ * cache-misses
+
+3. **ACL events**
+ * acl-drops
+ * rule-drop
+ * rule-nxdomain
+ * rule-refused
+
+4. **Noncompliant data**
+ * empty-queries
+ * no-policy
+ * noncompliant-queries
+ * noncompliant-responses
+
+5. **Queries**
+ * queries
+ * rdqueries
+ * empty-queries
+
+6. **Health**
+ * downstream-send-errors
+ * downstream-timeouts
+ * servfail-responses
+ * trunc-failures
+
+### configuration
+
+```yaml
+localhost:
+ name : 'local'
+ url : 'http://127.0.0.1:5053/jsonstat?command=stats'
+ user : 'username'
+ pass : 'password'
+ header:
+ X-API-Key: 'dnsdist-api-key'
+```
+
# dovecot
This module provides statistics information from dovecot server.
@@ -1278,6 +1472,45 @@ When no configuration file is found, module tries to connect to TCP/IP socket: `
---
+# powerdns
+
+This module monitors powerdns performance and health metrics.
+
+The following charts are drawn:
+
+1. **Queries and Answers**
+ * udp-queries
+ * udp-answers
+ * tcp-queries
+ * tcp-answers
+
+2. **Cache Usage**
+ * query-cache-hit
+ * query-cache-miss
+ * packetcache-hit
+ * packetcache-miss
+
+3. **Cache Size**
+ * query-cache-size
+ * packetcache-size
+ * key-cache-size
+ * meta-cache-size
+
+4. **Latency**
+ * latency
+
+### configuration
+
+```yaml
+local:
+ name : 'local'
+ url : 'http://127.0.0.1:8081/api/v1/servers/localhost/statistics'
+ header :
+ X-API-Key: 'change_me'
+```
+
+---
+
# rabbitmq
Module monitor rabbitmq performance and health metrics.
@@ -1321,10 +1554,10 @@ Following charts are drawn:
```yaml
socket:
name : 'local'
- host : '127.0.0.1'
- port : 15672
- user : 'guest'
- pass : 'guest'
+ host : '127.0.0.1'
+ port : 15672
+ user : 'guest'
+ pass : 'guest'
```
@@ -1573,60 +1806,64 @@ Module uses the `varnishstat` command to provide varnish cache statistics.
It produces:
-1. **Client metrics**
- * session accepted
- * session dropped
- * good client requests received
-
-2. **All history hit rate ratio**
- * cache hits in percent
- * cache miss in percent
- * cache hits for pass percent
-
-3. **Curent poll hit rate ratio**
- * cache hits in percent
- * cache miss in percent
- * cache hits for pass percent
-
-4. **Thread-related metrics** (only for varnish version 4+)
- * total number of threads
- * threads created
- * threads creation failed
- * threads hit max
- * length os session queue
- * sessions queued for thread
-
-5. **Backend health**
- * backend conn. success
- * backend conn. not attempted
- * backend conn. too many
- * backend conn. failures
- * backend conn. reuses
- * backend conn. recycles
- * backend conn. retry
- * backend requests made
-
-6. **Memory usage**
- * memory available in megabytes
- * memory allocated in megabytes
-
-7. **Problems summary**
- * session dropped
- * session accept failures
- * session pipe overflow
- * backend conn. not attempted
- * fetch failed (all causes)
- * backend conn. too many
- * threads hit max
- * threads destroyed
- * length of session queue
- * HTTP header overflows
- * ESI parse errors
- * ESI parse warnings
-
-8. **Uptime**
- * varnish instance uptime in seconds
+1. **Connections Statistics** in connections/s
+ * accepted
+ * dropped
+
+2. **Client Requests** in requests/s
+ * received
+
+3. **All History Hit Rate Ratio** in percent
+ * hit
+ * miss
+ * hitpass
+
+4. **Current Poll Hit Rate Ratio** in percent
+ * hit
+ * miss
+ * hitpass
+
+5. **Expired Objects** in expired/s
+ * objects
+
+6. **Least Recently Used Nuked Objects** in nuked/s
+ * objects
+
+
+7. **Number Of Threads In All Pools** in threads
+ * threads
+
+8. **Threads Statistics** in threads/s
+ * created
+ * failed
+ * limited
+
+9. **Current Queue Length** in requests
+ * in queue
+
+10. **Backend Connections Statistics** in connections/s
+ * successful
+ * unhealthy
+ * reused
+ * closed
+ * recycled
+ * failed
+
+11. **Requests To The Backend** in requests/s
+ * received
+
+12. **ESI Statistics** in problems/s
+ * errors
+ * warnings
+
+13. **Memory Usage** in MB
+ * free
+ * allocated
+
+14. **Uptime** in seconds
+ * uptime
+
+
### configuration
No configuration is needed.
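
Among the configuration samples above, the powerdns one is the easiest to sanity-check by hand, since it is a single authenticated JSON endpoint. A standalone probe might look like the sketch below; the URL and `X-API-Key` header come straight from the sample configuration, while the list-of-`{name, ..., value}` response shape is my assumption about the PowerDNS statistics API:

```python
import json
from urllib.request import Request, urlopen

# Endpoint and header taken from the powerdns sample configuration above.
req = Request('http://127.0.0.1:8081/api/v1/servers/localhost/statistics',
              headers={'X-API-Key': 'change_me'})
stats = json.load(urlopen(req, timeout=2))

# Pick out the dimensions charted under "Queries and Answers" and "Latency".
wanted = {'udp-queries', 'udp-answers', 'tcp-queries', 'tcp-answers', 'latency'}
print({item['name']: item['value'] for item in stats if item['name'] in wanted})
```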
diff --git a/python.d/apache.chart.py b/python.d/apache.chart.py
index 71fe0300..789b3c09 100644
--- a/python.d/apache.chart.py
+++ b/python.d/apache.chart.py
@@ -2,7 +2,7 @@
# Description: apache netdata python.d module
# Author: Pawel Krupa (paulfantom)
-from base import UrlService
+from bases.FrameworkServices.UrlService import UrlService
# default module values (can be overridden per job in `config`)
# update_every = 2
@@ -31,9 +31,7 @@ CHARTS = {
'options': [None, 'apache Workers', 'workers', 'workers', 'apache.workers', 'stacked'],
'lines': [
["idle"],
- ["idle_servers", 'idle'],
["busy"],
- ["busy_servers", 'busy']
]},
'reqpersec': {
'options': [None, 'apache Lifetime Avg. Requests/s', 'requests/s', 'statistics',
@@ -42,10 +40,10 @@ CHARTS = {
["requests_sec"]
]},
'bytespersec': {
- 'options': [None, 'apache Lifetime Avg. Bandwidth/s', 'kilobytes/s', 'statistics',
+ 'options': [None, 'apache Lifetime Avg. Bandwidth/s', 'kilobits/s', 'statistics',
'apache.bytesperreq', 'area'],
'lines': [
- ["size_sec", None, 'absolute', 1, 1000]
+ ["size_sec", None, 'absolute', 8, 1000]
]},
'requests': {
'options': [None, 'apache Requests', 'requests/s', 'requests', 'apache.requests', 'line'],
@@ -53,9 +51,9 @@ CHARTS = {
["requests", None, 'incremental']
]},
'net': {
- 'options': [None, 'apache Bandwidth', 'kilobytes/s', 'bandwidth', 'apache.net', 'area'],
+ 'options': [None, 'apache Bandwidth', 'kilobits/s', 'bandwidth', 'apache.net', 'area'],
'lines': [
- ["sent", None, 'incremental']
+ ["sent", None, 'incremental', 8, 1]
]},
'connections': {
'options': [None, 'apache Connections', 'connections', 'connections', 'apache.connections', 'line'],
@@ -94,15 +92,22 @@ class Service(UrlService):
self.url = self.configuration.get('url', 'http://localhost/server-status?auto')
def check(self):
- if UrlService.check(self):
- if 'idle_servers' in self._data_from_check:
- self.__module__ = 'lighttpd'
- for chart in self.definitions:
- opts = self.definitions[chart]['options']
- opts[1] = opts[1].replace('apache', 'lighttpd')
- opts[4] = opts[4].replace('apache', 'lighttpd')
- return True
- return False
+ self._manager = self._build_manager()
+ data = self._get_data()
+ if not data:
+ return None
+
+ if 'idle_servers' in data:
+ self.module_name = 'lighttpd'
+ for chart in self.definitions:
+ if chart == 'workers':
+ lines = self.definitions[chart]['lines']
+ lines[0] = ["idle_servers", 'idle']
+ lines[1] = ["busy_servers", 'busy']
+ opts = self.definitions[chart]['options']
+ opts[1] = opts[1].replace('apache', 'lighttpd')
+ opts[4] = opts[4].replace('apache', 'lighttpd')
+ return True
def _get_data(self):
"""
diff --git a/python.d/apache_cache.chart.py b/python.d/apache_cache.chart.py
deleted file mode 100644
index 3681a851..00000000
--- a/python.d/apache_cache.chart.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# -*- coding: utf-8 -*-
-# Description: apache cache netdata python.d module
-# Author: Pawel Krupa (paulfantom)
-
-from base import LogService
-
-priority = 60000
-retries = 60
-# update_every = 3
-
-ORDER = ['cache']
-CHARTS = {
- 'cache': {
- 'options': [None, 'apache cached responses', 'percent cached', 'cached', 'apache_cache.cache', 'stacked'],
- 'lines': [
- ["hit", 'cache', "percentage-of-absolute-row"],
- ["miss", None, "percentage-of-absolute-row"],
- ["other", None, "percentage-of-absolute-row"]
- ]}
-}
-
-
-class Service(LogService):
- def __init__(self, configuration=None, name=None):
- LogService.__init__(self, configuration=configuration, name=name)
- if len(self.log_path) == 0:
- self.log_path = "/var/log/apache2/cache.log"
- self.order = ORDER
- self.definitions = CHARTS
-
- def _get_data(self):
- """
- Parse new log lines
- :return: dict
- """
- try:
- raw = self._get_raw_data()
- if raw is None:
- return None
- elif not raw:
- return {'hit': 0,
- 'miss': 0,
- 'other': 0}
- except (ValueError, AttributeError):
- return None
-
- hit = 0
- miss = 0
- other = 0
- for line in raw:
- if "cache hit" in line:
- hit += 1
- elif "cache miss" in line:
- miss += 1
- else:
- other += 1
-
- return {'hit': hit,
- 'miss': miss,
- 'other': other}
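
The module removed above charted hits and misses as shares of each collected row via the `percentage-of-absolute-row` dimension algorithm. A sketch of that math, assuming the algorithm name means what it says:

```python
def percentage_of_absolute_row(row):
    """Each dimension as a percentage of the row total (assumed semantics)."""
    total = sum(row.values())
    return {k: (100.0 * v / total if total else 0.0) for k, v in row.items()}


# One collection interval with 30 hits, 10 misses and no other lines:
print(percentage_of_absolute_row({'hit': 30, 'miss': 10, 'other': 0}))
# -> {'hit': 75.0, 'miss': 25.0, 'other': 0.0}
```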
diff --git a/python.d/beanstalk.chart.py b/python.d/beanstalk.chart.py
new file mode 100644
index 00000000..8880afdd
--- /dev/null
+++ b/python.d/beanstalk.chart.py
@@ -0,0 +1,250 @@
+# -*- coding: utf-8 -*-
+# Description: beanstalk netdata python.d module
+# Author: l2isbad
+
+try:
+ import beanstalkc
+ BEANSTALKC = True
+except ImportError:
+ BEANSTALKC = False
+
+try:
+ import yaml
+ YAML = True
+except ImportError:
+ YAML = False
+
+from bases.FrameworkServices.SimpleService import SimpleService
+
+# default module values (can be overridden per job in `config`)
+# update_every = 2
+priority = 60000
+retries = 60
+
+ORDER = ['cpu_usage', 'jobs_rate', 'connections_rate', 'commands_rate', 'current_tubes', 'current_jobs',
+ 'current_connections', 'binlog', 'uptime']
+
+CHARTS = {
+ 'cpu_usage': {
+ 'options': [None, 'Cpu Usage', 'cpu time', 'server statistics', 'beanstalk.cpu_usage', 'area'],
+ 'lines': [
+ ['rusage-utime', 'user', 'incremental'],
+ ['rusage-stime', 'system', 'incremental']
+ ]
+ },
+ 'jobs_rate': {
+ 'options': [None, 'Jobs Rate', 'jobs/s', 'server statistics', 'beanstalk.jobs_rate', 'line'],
+ 'lines': [
+ ['total-jobs', 'total', 'incremental'],
+ ['job-timeouts', 'timeouts', 'incremental']
+ ]
+ },
+ 'connections_rate': {
+ 'options': [None, 'Connections Rate', 'connections/s', 'server statistics', 'beanstalk.connections_rate',
+ 'area'],
+ 'lines': [
+ ['total-connections', 'connections', 'incremental']
+ ]
+ },
+ 'commands_rate': {
+ 'options': [None, 'Commands Rate', 'commands/s', 'server statistics', 'beanstalk.commands_rate', 'stacked'],
+ 'lines': [
+ ['cmd-put', 'put', 'incremental'],
+ ['cmd-peek', 'peek', 'incremental'],
+ ['cmd-peek-ready', 'peek-ready', 'incremental'],
+ ['cmd-peek-delayed', 'peek-delayed', 'incremental'],
+ ['cmd-peek-buried', 'peek-buried', 'incremental'],
+ ['cmd-reserve', 'reserve', 'incremental'],
+ ['cmd-use', 'use', 'incremental'],
+ ['cmd-watch', 'watch', 'incremental'],
+ ['cmd-ignore', 'ignore', 'incremental'],
+ ['cmd-delete', 'delete', 'incremental'],
+ ['cmd-release', 'release', 'incremental'],
+ ['cmd-bury', 'bury', 'incremental'],
+ ['cmd-kick', 'kick', 'incremental'],
+ ['cmd-stats', 'stats', 'incremental'],
+ ['cmd-stats-job', 'stats-job', 'incremental'],
+ ['cmd-stats-tube', 'stats-tube', 'incremental'],
+ ['cmd-list-tubes', 'list-tubes', 'incremental'],
+ ['cmd-list-tube-used', 'list-tube-used', 'incremental'],
+ ['cmd-list-tubes-watched', 'list-tubes-watched', 'incremental'],
+ ['cmd-pause-tube', 'pause-tube', 'incremental']
+ ]
+ },
+ 'current_tubes': {
+ 'options': [None, 'Current Tubes', 'tubes', 'server statistics', 'beanstalk.current_tubes', 'area'],
+ 'lines': [
+ ['current-tubes', 'tubes']
+ ]
+ },
+ 'current_jobs': {
+ 'options': [None, 'Current Jobs', 'jobs', 'server statistics', 'beanstalk.current_jobs', 'stacked'],
+ 'lines': [
+ ['current-jobs-urgent', 'urgent'],
+ ['current-jobs-ready', 'ready'],
+ ['current-jobs-reserved', 'reserved'],
+ ['current-jobs-delayed', 'delayed'],
+ ['current-jobs-buried', 'buried']
+ ]
+ },
+ 'current_connections': {
+ 'options': [None, 'Current Connections', 'connections', 'server statistics',
+ 'beanstalk.current_connections', 'line'],
+ 'lines': [
+ ['current-connections', 'written'],
+ ['current-producers', 'producers'],
+ ['current-workers', 'workers'],
+ ['current-waiting', 'waiting']
+ ]
+ },
+ 'binlog': {
+ 'options': [None, 'Binlog', 'records/s', 'server statistics', 'beanstalk.binlog', 'line'],
+ 'lines': [
+ ['binlog-records-written', 'written', 'incremental'],
+ ['binlog-records-migrated', 'migrated', 'incremental']
+ ]
+ },
+ 'uptime': {
+ 'options': [None, 'Uptime', 'seconds', 'server statistics', 'beanstalk.uptime', 'line'],
+ 'lines': [
+ ['uptime'],
+ ]
+ }
+}
+
+
+def tube_chart_template(name):
+ order = ['{0}_jobs_rate'.format(name),
+ '{0}_jobs'.format(name),
+ '{0}_connections'.format(name),
+ '{0}_commands'.format(name),
+ '{0}_pause'.format(name)
+ ]
+ family = 'tube {0}'.format(name)
+
+ charts = {
+ order[0]: {
+ 'options': [None, 'Job Rate', 'jobs/s', family, 'beanstalk.jobs_rate', 'area'],
+ 'lines': [
+ ['_'.join([name, 'total-jobs']), 'jobs', 'incremental']
+ ]},
+ order[1]: {
+ 'options': [None, 'Jobs', 'jobs', family, 'beanstalk.jobs', 'stacked'],
+ 'lines': [
+ ['_'.join([name, 'current-jobs-urgent']), 'urgent'],
+ ['_'.join([name, 'current-jobs-ready']), 'ready'],
+ ['_'.join([name, 'current-jobs-reserved']), 'reserved'],
+ ['_'.join([name, 'current-jobs-delayed']), 'delayed'],
+ ['_'.join([name, 'current-jobs-buried']), 'buried']
+ ]},
+ order[2]: {
+ 'options': [None, 'Connections', 'connections', family, 'beanstalk.connections', 'stacked'],
+ 'lines': [
+ ['_'.join([name, 'current-using']), 'using'],
+ ['_'.join([name, 'current-waiting']), 'waiting'],
+ ['_'.join([name, 'current-watching']), 'watching']
+ ]},
+ order[3]: {
+ 'options': [None, 'Commands', 'commands/s', family, 'beanstalk.commands', 'stacked'],
+ 'lines': [
+ ['_'.join([name, 'cmd-delete']), 'deletes', 'incremental'],
+ ['_'.join([name, 'cmd-pause-tube']), 'pauses', 'incremental']
+ ]},
+ order[4]: {
+ 'options': [None, 'Pause', 'seconds', family, 'beanstalk.pause', 'stacked'],
+ 'lines': [
+ ['_'.join([name, 'pause']), 'since'],
+ ['_'.join([name, 'pause-time-left']), 'left']
+ ]}
+
+ }
+
+ return order, charts
+
+
+class Service(SimpleService):
+ def __init__(self, configuration=None, name=None):
+ SimpleService.__init__(self, configuration=configuration, name=name)
+ self.configuration = configuration
+ self.order = list(ORDER)
+ self.definitions = dict(CHARTS)
+ self.conn = None
+ self.alive = True
+
+ def check(self):
+ if not BEANSTALKC:
+ self.error("'beanstalkc' module is needed to use beanstalk.chart.py")
+ return False
+
+ if not YAML:
+ self.error("'yaml' module is needed to use beanstalk.chart.py")
+ return False
+
+ self.conn = self.connect()
+
+ return True if self.conn else False
+
+ def get_data(self):
+ """
+ :return: dict
+ """
+ if not self.is_alive():
+ return None
+
+ active_charts = self.charts.active_charts()
+ data = dict()
+
+ try:
+ data.update(self.conn.stats())
+
+ for tube in self.conn.tubes():
+ stats = self.conn.stats_tube(tube)
+
+ if tube + '_jobs_rate' not in active_charts:
+ self.create_new_tube_charts(tube)
+
+ for stat in stats:
+ data['_'.join([tube, stat])] = stats[stat]
+
+ except beanstalkc.SocketError:
+ self.alive = False
+ return None
+
+ return data or None
+
+ def create_new_tube_charts(self, tube):
+ order, charts = tube_chart_template(tube)
+
+ for chart_name in order:
+ params = [chart_name] + charts[chart_name]['options']
+ dimensions = charts[chart_name]['lines']
+
+ new_chart = self.charts.add_chart(params)
+ for dimension in dimensions:
+ new_chart.add_dimension(dimension)
+
+ def connect(self):
+ host = self.configuration.get('host', '127.0.0.1')
+ port = self.configuration.get('port', 11300)
+ timeout = self.configuration.get('timeout', 1)
+ try:
+ return beanstalkc.Connection(host=host,
+ port=port,
+ connect_timeout=timeout,
+ parse_yaml=yaml.load)
+ except beanstalkc.SocketError as error:
+ self.error('Connection to {0}:{1} failed: {2}'.format(host, port, error))
+ return None
+
+ def reconnect(self):
+ try:
+ self.conn.reconnect()
+ self.alive = True
+ return True
+ except beanstalkc.SocketError:
+ return False
+
+ def is_alive(self):
+ if not self.alive:
+ return self.reconnect()
+ return True
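
The interesting framework feature on display in this new module is runtime chart creation: `get_data()` notices a tube that has no charts yet and builds them on the fly through `self.charts.add_chart()` / `add_dimension()`, instead of declaring everything up front. `tube_chart_template()` is pure data, so its output can be inspected standalone (a sketch reusing the function defined above):

```python
# Given the tube_chart_template() above, a tube named 'emails' yields:
order, charts = tube_chart_template('emails')

print(order[0])                            # 'emails_jobs_rate'
print(charts['emails_jobs_rate']['lines'])
# -> [['emails_total-jobs', 'jobs', 'incremental']]

# create_new_tube_charts() then feeds each entry to the charts registry:
#   params = [chart_name] + charts[chart_name]['options']
#   new_chart = self.charts.add_chart(params)
#   new_chart.add_dimension(dimension)
```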
diff --git a/python.d/bind_rndc.chart.py b/python.d/bind_rndc.chart.py
index 5a974928..cc96659b 100644
--- a/python.d/bind_rndc.chart.py
+++ b/python.d/bind_rndc.chart.py
@@ -2,11 +2,13 @@
# Description: bind rndc netdata python.d module
# Author: l2isbad
-from os.path import getsize
-from os import access, R_OK
-from subprocess import Popen
+import os
+
from collections import defaultdict
-from base import SimpleService
+from subprocess import Popen
+
+from bases.collection import find_binary
+from bases.FrameworkServices.SimpleService import SimpleService
priority = 60000
retries = 60
@@ -94,7 +96,7 @@ class Service(SimpleService):
self.order = ORDER
self.definitions = CHARTS
self.named_stats_path = self.configuration.get('named_stats_path', '/var/log/bind/named.stats')
- self.rndc = self.find_binary('rndc')
+ self.rndc = find_binary('rndc')
self.data = dict(nms_requests=0, nms_responses=0, nms_failure=0, nms_auth=0,
nms_non_auth=0, nms_nxrrset=0, nms_success=0, nms_nxdomain=0,
nms_recursion=0, nms_duplicate=0, nms_rejected_queries=0,
@@ -102,10 +104,10 @@ class Service(SimpleService):
def check(self):
if not self.rndc:
- self.error('Can\'t locate \'rndc\' binary or binary is not executable by netdata')
+ self.error('Can\'t locate "rndc" binary or binary is not executable by netdata')
return False
- if not access(self.named_stats_path, R_OK):
+ if not (os.path.isfile(self.named_stats_path) and os.access(self.named_stats_path, os.R_OK)):
self.error('Cannot access file %s' % self.named_stats_path)
return False
@@ -124,7 +126,7 @@ class Service(SimpleService):
"""
result = dict()
try:
- current_size = getsize(self.named_stats_path)
+ current_size = os.path.getsize(self.named_stats_path)
run_rndc = Popen([self.rndc, 'stats'], shell=False)
run_rndc.wait()
@@ -159,12 +161,12 @@ class Service(SimpleService):
parsed_key, chart_name = elem[0], elem[1]
for dimension_id, value in queries_mapper(data=parsed[parsed_key],
add=chart_name[:9]).items():
+
if dimension_id not in self.data:
dimension = dimension_id.replace(chart_name[:9], '')
- self._add_new_dimension(dimension_id=dimension_id,
- dimension=dimension,
- chart_name=chart_name,
- priority=self.priority + self.order.index(chart_name))
+ if dimension_id not in self.charts[chart_name]:
+ self.charts[chart_name].add_dimension([dimension_id, dimension, 'incremental'])
+
self.data[dimension_id] = value
self.data['stats_size'] = raw_data['size']
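
`find_binary` moves from a `SimpleService` method to a helper in `bases.collection`. Its implementation is not shown in this diff; a minimal stand-in with the behavior the call sites assume (first executable match on `PATH`, else a falsy result) would be:

```python
import os


def find_binary(name):
    """Return the first executable 'name' on PATH, or None (assumed behavior)."""
    for directory in os.environ.get('PATH', '').split(os.pathsep):
        candidate = os.path.join(directory, name)
        if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
            return candidate
    return None


print(find_binary('rndc'))  # e.g. '/usr/sbin/rndc', or None if not found
```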
diff --git a/python.d/chrony.chart.py b/python.d/chrony.chart.py
index 96d7e696..8f331fa5 100644
--- a/python.d/chrony.chart.py
+++ b/python.d/chrony.chart.py
@@ -2,10 +2,10 @@
# Description: chrony netdata python.d module
# Author: Dominik Schloesser (domschl)
-from base import ExecutableService
+from bases.FrameworkServices.ExecutableService import ExecutableService
# default module values (can be overridden per job in `config`)
-# update_every = 10
+update_every = 5
priority = 60000
retries = 10
diff --git a/python.d/couchdb.chart.py b/python.d/couchdb.chart.py
new file mode 100644
index 00000000..558bac58
--- /dev/null
+++ b/python.d/couchdb.chart.py
@@ -0,0 +1,410 @@
+# -*- coding: utf-8 -*-
+# Description: couchdb netdata python.d module
+# Author: wohali <wohali@apache.org>
+# Thanks to l2isbad for good examples :)
+
+from collections import namedtuple, defaultdict
+from json import loads
+from threading import Thread
+from socket import gethostbyname, gaierror
+try:
+ from queue import Queue
+except ImportError:
+ from Queue import Queue
+
+from bases.FrameworkServices.UrlService import UrlService
+
+# default module values (can be overridden per job in `config`)
+update_every = 1
+priority = 60000
+retries = 60
+
+METHODS = namedtuple('METHODS', ['get_data', 'url', 'stats'])
+
+OVERVIEW_STATS = [
+ 'couchdb.database_reads.value',
+ 'couchdb.database_writes.value',
+    'couchdb.httpd.view_reads.value',
+ 'couchdb.httpd_request_methods.COPY.value',
+ 'couchdb.httpd_request_methods.DELETE.value',
+ 'couchdb.httpd_request_methods.GET.value',
+ 'couchdb.httpd_request_methods.HEAD.value',
+ 'couchdb.httpd_request_methods.OPTIONS.value',
+ 'couchdb.httpd_request_methods.POST.value',
+ 'couchdb.httpd_request_methods.PUT.value',
+ 'couchdb.httpd_status_codes.200.value',
+ 'couchdb.httpd_status_codes.201.value',
+ 'couchdb.httpd_status_codes.202.value',
+ 'couchdb.httpd_status_codes.204.value',
+ 'couchdb.httpd_status_codes.206.value',
+ 'couchdb.httpd_status_codes.301.value',
+ 'couchdb.httpd_status_codes.302.value',
+ 'couchdb.httpd_status_codes.304.value',
+ 'couchdb.httpd_status_codes.400.value',
+ 'couchdb.httpd_status_codes.401.value',
+ 'couchdb.httpd_status_codes.403.value',
+ 'couchdb.httpd_status_codes.404.value',
+ 'couchdb.httpd_status_codes.405.value',
+ 'couchdb.httpd_status_codes.406.value',
+ 'couchdb.httpd_status_codes.409.value',
+ 'couchdb.httpd_status_codes.412.value',
+ 'couchdb.httpd_status_codes.413.value',
+ 'couchdb.httpd_status_codes.414.value',
+ 'couchdb.httpd_status_codes.415.value',
+ 'couchdb.httpd_status_codes.416.value',
+ 'couchdb.httpd_status_codes.417.value',
+ 'couchdb.httpd_status_codes.500.value',
+ 'couchdb.httpd_status_codes.501.value',
+ 'couchdb.open_os_files.value',
+ 'couch_replicator.jobs.running.value',
+ 'couch_replicator.jobs.pending.value',
+ 'couch_replicator.jobs.crashed.value',
+]
+
+SYSTEM_STATS = [
+ 'context_switches',
+ 'run_queue',
+ 'ets_table_count',
+ 'reductions',
+ 'memory.atom',
+ 'memory.atom_used',
+ 'memory.binary',
+ 'memory.code',
+ 'memory.ets',
+ 'memory.other',
+ 'memory.processes',
+ 'io_input',
+ 'io_output',
+ 'os_proc_count',
+ 'process_count',
+ 'internal_replication_jobs'
+]
+
+DB_STATS = [
+ 'doc_count',
+ 'doc_del_count',
+ 'sizes.file',
+ 'sizes.external',
+ 'sizes.active'
+]
+
+ORDER = [
+ 'activity',
+ 'request_methods',
+ 'response_codes',
+ 'active_tasks',
+ 'replicator_jobs',
+ 'open_files',
+ 'db_sizes_file',
+ 'db_sizes_external',
+ 'db_sizes_active',
+ 'db_doc_counts',
+ 'db_doc_del_counts',
+ 'erlang_memory',
+ 'erlang_proc_counts',
+ 'erlang_peak_msg_queue',
+ 'erlang_reductions'
+]
+
+CHARTS = {
+ 'activity': {
+ 'options': [None, 'Overall Activity', 'req/s',
+ 'dbactivity', 'couchdb.activity', 'stacked'],
+ 'lines': [
+ ['couchdb_database_reads', 'DB reads', 'incremental'],
+ ['couchdb_database_writes', 'DB writes', 'incremental'],
+ ['couchdb_httpd_view_reads', 'View reads', 'incremental']
+ ]
+ },
+ 'request_methods': {
+ 'options': [None, 'HTTP request methods', 'req/s',
+ 'httptraffic', 'couchdb.request_methods',
+ 'stacked'],
+ 'lines': [
+ ['couchdb_httpd_request_methods_COPY', 'COPY', 'incremental'],
+ ['couchdb_httpd_request_methods_DELETE', 'DELETE', 'incremental'],
+ ['couchdb_httpd_request_methods_GET', 'GET', 'incremental'],
+ ['couchdb_httpd_request_methods_HEAD', 'HEAD', 'incremental'],
+ ['couchdb_httpd_request_methods_OPTIONS', 'OPTIONS',
+ 'incremental'],
+ ['couchdb_httpd_request_methods_POST', 'POST', 'incremental'],
+ ['couchdb_httpd_request_methods_PUT', 'PUT', 'incremental']
+ ]
+ },
+ 'response_codes': {
+ 'options': [None, 'HTTP response status codes', 'resp/s',
+ 'httptraffic', 'couchdb.response_codes',
+ 'stacked'],
+ 'lines': [
+ ['couchdb_httpd_status_codes_200', '200 OK', 'incremental'],
+ ['couchdb_httpd_status_codes_201', '201 Created', 'incremental'],
+ ['couchdb_httpd_status_codes_202', '202 Accepted', 'incremental'],
+ ['couchdb_httpd_status_codes_2xx', 'Other 2xx Success',
+ 'incremental'],
+ ['couchdb_httpd_status_codes_3xx', '3xx Redirection',
+ 'incremental'],
+ ['couchdb_httpd_status_codes_4xx', '4xx Client error',
+ 'incremental'],
+ ['couchdb_httpd_status_codes_5xx', '5xx Server error',
+ 'incremental']
+ ]
+ },
+ 'open_files': {
+ 'options': [None, 'Open files', 'files',
+ 'ops', 'couchdb.open_files', 'line'],
+ 'lines': [
+ ['couchdb_open_os_files', '# files', 'absolute']
+ ]
+ },
+ 'active_tasks': {
+ 'options': [None, 'Active task breakdown', 'tasks',
+ 'ops', 'couchdb.active_tasks', 'stacked'],
+ 'lines': [
+ ['activetasks_indexer', 'Indexer', 'absolute'],
+ ['activetasks_database_compaction', 'DB Compaction', 'absolute'],
+ ['activetasks_replication', 'Replication', 'absolute'],
+ ['activetasks_view_compaction', 'View Compaction', 'absolute']
+ ]
+ },
+ 'replicator_jobs': {
+ 'options': [None, 'Replicator job breakdown', 'jobs',
+ 'ops', 'couchdb.replicator_jobs', 'stacked'],
+ 'lines': [
+ ['couch_replicator_jobs_running', 'Running', 'absolute'],
+ ['couch_replicator_jobs_pending', 'Pending', 'absolute'],
+ ['couch_replicator_jobs_crashed', 'Crashed', 'absolute'],
+ ['internal_replication_jobs', 'Internal replication jobs',
+ 'absolute']
+ ]
+ },
+ 'erlang_memory': {
+ 'options': [None, 'Erlang VM memory usage', 'bytes',
+ 'erlang', 'couchdb.erlang_vm_memory', 'stacked'],
+ 'lines': [
+ ['memory_atom', 'atom', 'absolute'],
+ ['memory_binary', 'binaries', 'absolute'],
+ ['memory_code', 'code', 'absolute'],
+ ['memory_ets', 'ets', 'absolute'],
+ ['memory_processes', 'procs', 'absolute'],
+ ['memory_other', 'other', 'absolute']
+ ]
+ },
+ 'erlang_reductions': {
+ 'options': [None, 'Erlang reductions', 'count',
+ 'erlang', 'couchdb.reductions', 'line'],
+ 'lines': [
+ ['reductions', 'reductions', 'incremental']
+ ]
+ },
+ 'erlang_proc_counts': {
+ 'options': [None, 'Process counts', 'count',
+ 'erlang', 'couchdb.proccounts', 'line'],
+ 'lines': [
+ ['os_proc_count', 'OS procs', 'absolute'],
+ ['process_count', 'erl procs', 'absolute']
+ ]
+ },
+ 'erlang_peak_msg_queue': {
+ 'options': [None, 'Peak message queue size', 'count',
+ 'erlang', 'couchdb.peakmsgqueue',
+ 'line'],
+ 'lines': [
+ ['peak_msg_queue', 'peak size', 'absolute']
+ ]
+ },
+ # Lines for the following are added as part of check()
+ 'db_sizes_file': {
+ 'options': [None, 'Database sizes (file)', 'KB',
+ 'perdbstats', 'couchdb.db_sizes_file', 'line'],
+ 'lines': []
+ },
+ 'db_sizes_external': {
+ 'options': [None, 'Database sizes (external)', 'KB',
+ 'perdbstats', 'couchdb.db_sizes_external', 'line'],
+ 'lines': []
+ },
+ 'db_sizes_active': {
+ 'options': [None, 'Database sizes (active)', 'KB',
+ 'perdbstats', 'couchdb.db_sizes_active', 'line'],
+ 'lines': []
+ },
+ 'db_doc_counts': {
+ 'options': [None, 'Database # of docs', 'docs',
+ 'perdbstats', 'couchdb_db_doc_count', 'line'],
+ 'lines': []
+ },
+ 'db_doc_del_counts': {
+ 'options': [None, 'Database # of deleted docs', 'docs',
+ 'perdbstats', 'couchdb_db_doc_del_count', 'line'],
+ 'lines': []
+ }
+}
+
+
+class Service(UrlService):
+ def __init__(self, configuration=None, name=None):
+ UrlService.__init__(self, configuration=configuration, name=name)
+ self.order = ORDER
+ self.definitions = CHARTS
+ self.host = self.configuration.get('host', '127.0.0.1')
+ self.port = self.configuration.get('port', 5984)
+ self.node = self.configuration.get('node', 'couchdb@127.0.0.1')
+ self.scheme = self.configuration.get('scheme', 'http')
+ self.user = self.configuration.get('user')
+ self.password = self.configuration.get('pass')
+ try:
+ self.dbs = self.configuration.get('databases').split(' ')
+ except (KeyError, AttributeError):
+ self.dbs = []
+
+ def check(self):
+ if not (self.host and self.port):
+ self.error('Host is not defined in the module configuration file')
+ return False
+ try:
+ self.host = gethostbyname(self.host)
+ except gaierror as error:
+ self.error(str(error))
+ return False
+ self.url = '{scheme}://{host}:{port}'.format(scheme=self.scheme,
+ host=self.host,
+ port=self.port)
+ stats = self.url + '/_node/{node}/_stats'.format(node=self.node)
+ active_tasks = self.url + '/_active_tasks'
+ system = self.url + '/_node/{node}/_system'.format(node=self.node)
+ self.methods = [METHODS(get_data=self._get_overview_stats,
+ url=stats,
+ stats=OVERVIEW_STATS),
+ METHODS(get_data=self._get_active_tasks_stats,
+ url=active_tasks,
+ stats=None),
+ METHODS(get_data=self._get_overview_stats,
+ url=system,
+ stats=SYSTEM_STATS),
+ METHODS(get_data=self._get_dbs_stats,
+ url=self.url,
+ stats=DB_STATS)]
+ # must initialise manager before using _get_raw_data
+ self._manager = self._build_manager()
+ self.dbs = [db for db in self.dbs
+ if self._get_raw_data(self.url + '/' + db)]
+ for db in self.dbs:
+ self.definitions['db_sizes_file']['lines'].append(
+ ['db_'+db+'_sizes_file', db, 'absolute', 1, 1000]
+ )
+ self.definitions['db_sizes_external']['lines'].append(
+ ['db_'+db+'_sizes_external', db, 'absolute', 1, 1000]
+ )
+ self.definitions['db_sizes_active']['lines'].append(
+ ['db_'+db+'_sizes_active', db, 'absolute', 1, 1000]
+ )
+ self.definitions['db_doc_counts']['lines'].append(
+ ['db_'+db+'_doc_count', db, 'absolute']
+ )
+ self.definitions['db_doc_del_counts']['lines'].append(
+ ['db_'+db+'_doc_del_count', db, 'absolute']
+ )
+ return UrlService.check(self)
+
+ def _get_data(self):
+ threads = list()
+ queue = Queue()
+ result = dict()
+
+ for method in self.methods:
+ th = Thread(target=method.get_data,
+ args=(queue, method.url, method.stats))
+ th.start()
+ threads.append(th)
+
+ for thread in threads:
+ thread.join()
+ result.update(queue.get())
+
+ # self.info('couchdb result = ' + str(result))
+ return result or None
+
+ def _get_overview_stats(self, queue, url, stats):
+ raw_data = self._get_raw_data(url)
+ if not raw_data:
+ return queue.put(dict())
+ data = loads(raw_data)
+ to_netdata = self._fetch_data(raw_data=data, metrics=stats)
+ if 'message_queues' in data:
+ to_netdata['peak_msg_queue'] = get_peak_msg_queue(data)
+ return queue.put(to_netdata)
+
+ def _get_active_tasks_stats(self, queue, url, _):
+ taskdict = defaultdict(int)
+ taskdict["activetasks_indexer"] = 0
+ taskdict["activetasks_database_compaction"] = 0
+ taskdict["activetasks_replication"] = 0
+ taskdict["activetasks_view_compaction"] = 0
+ raw_data = self._get_raw_data(url)
+ if not raw_data:
+ return queue.put(dict())
+ data = loads(raw_data)
+ for task in data:
+ taskdict["activetasks_" + task["type"]] += 1
+ return queue.put(dict(taskdict))
+
+ def _get_dbs_stats(self, queue, url, stats):
+ to_netdata = {}
+ for db in self.dbs:
+ raw_data = self._get_raw_data(url + '/' + db)
+ if not raw_data:
+ continue
+ data = loads(raw_data)
+ for metric in stats:
+ value = data
+ metrics_list = metric.split('.')
+ try:
+ for m in metrics_list:
+ value = value[m]
+ except KeyError as e:
+ self.debug('cannot process ' + metric + ' for ' + db
+ + ": " + str(e))
+ continue
+ metric_name = 'db_{0}_{1}'.format(db, '_'.join(metrics_list))
+ to_netdata[metric_name] = value
+ return queue.put(to_netdata)
+
+ def _fetch_data(self, raw_data, metrics):
+ data = dict()
+ for metric in metrics:
+ value = raw_data
+ metrics_list = metric.split('.')
+ try:
+ for m in metrics_list:
+ value = value[m]
+ except KeyError as e:
+ self.debug('cannot process ' + metric + ': ' + str(e))
+ continue
+ # strip off .value from end of stat
+ if metrics_list[-1] == 'value':
+ metrics_list = metrics_list[:-1]
+ # sum up 3xx/4xx/5xx
+ if metrics_list[0:2] == ['couchdb', 'httpd_status_codes'] and \
+ int(metrics_list[2]) > 202:
+ metrics_list[2] = '{0}xx'.format(int(metrics_list[2]) // 100)
+ if '_'.join(metrics_list) in data:
+ data['_'.join(metrics_list)] += value
+ else:
+ data['_'.join(metrics_list)] = value
+ else:
+ data['_'.join(metrics_list)] = value
+ return data
+
+
+def get_peak_msg_queue(data):
+ maxsize = 0
+ queues = data['message_queues']
+ for queue in iter(queues.values()):
+ if isinstance(queue, dict) and 'count' in queue:
+ value = queue['count']
+ elif isinstance(queue, int):
+ value = queue
+ else:
+ continue
+ maxsize = max(maxsize, value)
+ return maxsize
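
Two details of `_fetch_data()` above are worth spelling out: metric names are dotted paths walked key by key through the nested `_stats` JSON, and HTTP status codes above 202 are folded into `2xx`/`3xx`/`4xx`/`5xx` buckets via integer division. A standalone sketch of both steps:

```python
# Walk a dotted metric path through nested dicts, as _fetch_data() does.
data = {'couchdb': {'httpd_status_codes': {'404': {'value': 7}}}}

value = data
for key in 'couchdb.httpd_status_codes.404.value'.split('.'):
    value = value[key]

# Codes above 202 collapse into one dimension per hundred-block.
bucket = '{0}xx'.format(404 // 100)
print(bucket, value)  # -> 4xx 7
```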
diff --git a/python.d/cpufreq.chart.py b/python.d/cpufreq.chart.py
index 01cc22b0..3abde736 100644
--- a/python.d/cpufreq.chart.py
+++ b/python.d/cpufreq.chart.py
@@ -4,8 +4,8 @@
import glob
import os
-import time
-from base import SimpleService
+
+from bases.FrameworkServices.SimpleService import SimpleService
# default module values (can be overridden per job in `config`)
# update_every = 2
@@ -14,12 +14,13 @@ ORDER = ['cpufreq']
CHARTS = {
'cpufreq': {
- 'options': [None, 'CPU Clock', 'MHz', 'cpufreq', None, 'line'],
+ 'options': [None, 'CPU Clock', 'MHz', 'cpufreq', 'cpufreq.cpufreq', 'line'],
'lines': [
# lines are created dynamically in `check()` method
]}
}
+
class Service(SimpleService):
def __init__(self, configuration=None, name=None):
prefix = os.getenv('NETDATA_HOST_PREFIX', "")
@@ -29,7 +30,7 @@ class Service(SimpleService):
SimpleService.__init__(self, configuration=configuration, name=name)
self.order = ORDER
self.definitions = CHARTS
- self._orig_name = ""
+ self.fake_name = 'cpu'
self.assignment = {}
self.accurate_exists = True
self.accurate_last = {}
@@ -72,7 +73,6 @@ class Service(SimpleService):
if accurate_ok:
return data
-
for name, paths in self.assignment.items():
data[name] = open(paths['inaccurate'], 'r').read()
@@ -84,8 +84,6 @@ class Service(SimpleService):
except (KeyError, TypeError):
self.error("No path specified. Using: '" + self.sys_dir + "'")
- self._orig_name = self.chart_name
-
for path in glob.glob(self.sys_dir + '/system/cpu/cpu*/cpufreq/stats/time_in_state'):
path_elem = path.split('/')
cpu = path_elem[-4]
@@ -113,14 +111,3 @@ class Service(SimpleService):
return True
- def create(self):
- self.chart_name = "cpu"
- status = SimpleService.create(self)
- self.chart_name = self._orig_name
- return status
-
- def update(self, interval):
- self.chart_name = "cpu"
- status = SimpleService.update(self, interval=interval)
- self.chart_name = self._orig_name
- return status
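
Both this module and cpuidle below drop the `create()`/`update()` overrides that swapped `chart_name` to `"cpu"` around every framework call; the new `fake_name` attribute asks the framework to do that renaming itself. A sketch of the replacement, under the assumption that `fake_name` substitutes for the job's own name when charts are registered:

```python
from bases.FrameworkServices.SimpleService import SimpleService


class Service(SimpleService):
    def __init__(self, configuration=None, name=None):
        SimpleService.__init__(self, configuration=configuration, name=name)
        # Charts register under "cpu" regardless of the job name, replacing
        # the removed create()/update() chart_name juggling shown above.
        self.fake_name = 'cpu'
```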
diff --git a/python.d/cpuidle.chart.py b/python.d/cpuidle.chart.py
index e5ed49bd..d14c6aaf 100644
--- a/python.d/cpuidle.chart.py
+++ b/python.d/cpuidle.chart.py
@@ -5,8 +5,8 @@
import glob
import os
import platform
-import time
-from base import SimpleService
+
+from bases.FrameworkServices.SimpleService import SimpleService
import ctypes
syscall = ctypes.CDLL('libc.so.6').syscall
@@ -14,6 +14,7 @@ syscall = ctypes.CDLL('libc.so.6').syscall
# default module values (can be overridden per job in `config`)
# update_every = 2
+
class Service(SimpleService):
def __init__(self, configuration=None, name=None):
prefix = os.getenv('NETDATA_HOST_PREFIX', "")
@@ -24,11 +25,12 @@ class Service(SimpleService):
SimpleService.__init__(self, configuration=configuration, name=name)
self.order = []
self.definitions = {}
- self._orig_name = ""
+ self.fake_name = 'cpu'
self.assignment = {}
self.last_schedstat = None
- def __gettid(self):
+ @staticmethod
+ def __gettid():
# This is horrendous. We need the *thread id* (not the *process id*),
# but there's no Python standard library way of doing that. If you need
# to enable this module on a non-x86 machine type, you'll have to find
@@ -108,8 +110,6 @@ class Service(SimpleService):
self.error("Cannot get thread ID. Stats would be completely broken.")
return False
- self._orig_name = self.chart_name
-
for path in sorted(glob.glob(self.sys_dir + '/cpu*/cpuidle/state*/name')):
# ['', 'sys', 'devices', 'system', 'cpu', 'cpu0', 'cpuidle', 'state3', 'name']
path_elem = path.split('/')
@@ -122,7 +122,7 @@ class Service(SimpleService):
self.order.append(orderid)
active_name = '%s_active_time' % (cpu,)
self.definitions[orderid] = {
- 'options': [None, 'C-state residency', 'time%', 'cpuidle', None, 'stacked'],
+ 'options': [None, 'C-state residency', 'time%', 'cpuidle', 'cpuidle.cpuidle', 'stacked'],
'lines': [
[active_name, 'C0 (active)', 'percentage-of-incremental-row', 1, 1],
],
@@ -146,16 +146,3 @@ class Service(SimpleService):
return True
- def create(self):
- self.chart_name = "cpu"
- status = SimpleService.create(self)
- self.chart_name = self._orig_name
- return status
-
- def update(self, interval):
- self.chart_name = "cpu"
- status = SimpleService.update(self, interval=interval)
- self.chart_name = self._orig_name
- return status
-
-# vim: set ts=4 sts=4 sw=4 et:
diff --git a/python.d/dns_query_time.chart.py b/python.d/dns_query_time.chart.py
index 9053d9a1..9a794a9c 100644
--- a/python.d/dns_query_time.chart.py
+++ b/python.d/dns_query_time.chart.py
@@ -2,6 +2,10 @@
# Description: dns_query_time netdata python.d module
# Author: l2isbad
+from random import choice
+from threading import Thread
+from socket import getaddrinfo, gaierror
+
try:
from time import monotonic as time
except ImportError:
@@ -15,10 +19,8 @@ try:
from queue import Queue
except ImportError:
from Queue import Queue
-from random import choice
-from threading import Thread
-from socket import gethostbyname, gaierror
-from base import SimpleService
+
+from bases.FrameworkServices.SimpleService import SimpleService
# default module values (can be overridden per job in `config`)
@@ -43,7 +45,6 @@ class Service(SimpleService):
return False
self.timeout = self.timeout if isinstance(self.timeout, int) else 4
- self.update_every = self.timeout + 1 if self.update_every <= self.timeout else self.update_every
if not all([self.domains, self.server_list,
isinstance(self.server_list, str), isinstance(self.domains, str)]):
@@ -69,9 +70,7 @@ class Service(SimpleService):
if not self.server_list:
return False
- self._data_from_check = data
self.order, self.definitions = create_charts(aggregate=self.aggregate, server_list=self.server_list)
- self.info(str({'domains': len(self.domains), 'servers': self.server_list}))
return True
def _get_data(self, timeout=None):
@@ -110,7 +109,7 @@ def dns_request(server_list, timeout, domains):
def check_ns(ns):
try:
- return gethostbyname(ns)
+ return getaddrinfo(ns, 'domain')[0][4][0]
except gaierror:
return False
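
Editor's note: the switch from gethostbyname() to getaddrinfo() makes the nameserver check IPv6-capable. Each getaddrinfo() result is a (family, type, proto, canonname, sockaddr) tuple, sockaddr[0] is the resolved address, and 'domain' is the well-known service name for port 53. A self-contained sketch:

from socket import getaddrinfo, gaierror

def check_ns(ns):
    try:
        # first result, sockaddr field, address component
        return getaddrinfo(ns, 'domain')[0][4][0]
    except gaierror:
        return False

print(check_ns('localhost'))  # e.g. '127.0.0.1' or '::1'
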
diff --git a/python.d/dnsdist.chart.py b/python.d/dnsdist.chart.py
new file mode 100644
index 00000000..b40112cb
--- /dev/null
+++ b/python.d/dnsdist.chart.py
@@ -0,0 +1,101 @@
+# -*- coding: utf-8 -*-
+from json import loads
+from bases.FrameworkServices.UrlService import UrlService
+
+ORDER = ['queries', 'queries_dropped', 'packets_dropped', 'answers',
+         'backend_responses', 'backend_commerrors', 'backend_errors',
+         'cache', 'servercpu', 'servermem', 'query_latency', 'query_latency_avg']
+CHARTS = {
+ 'queries': {
+ 'options': [None, 'Client queries received', 'queries/s', 'queries', 'dnsdist.queries', 'line'],
+ 'lines': [
+ ['queries', 'all', 'incremental'],
+ ['rdqueries', 'recursive', 'incremental'],
+ ['empty-queries', 'empty', 'incremental']
+ ]},
+ 'queries_dropped': {
+ 'options': [None, 'Client queries dropped', 'queries/s', 'queries', 'dnsdist.queries_dropped', 'line'],
+ 'lines': [
+ ['rule-drop', 'rule drop', 'incremental'],
+ ['dyn-blocked', 'dynamic block', 'incremental'],
+ ['no-policy', 'no policy', 'incremental'],
+ ['noncompliant-queries', 'non compliant', 'incremental']
+ ]},
+ 'packets_dropped': {
+ 'options': [None, 'Packets dropped', 'packets/s', 'packets', 'dnsdist.packets_dropped', 'line'],
+ 'lines': [
+ ['acl-drops', 'acl', 'incremental']
+ ]},
+ 'answers': {
+ 'options': [None, 'Answers statistics', 'answers/s', 'answers', 'dnsdist.answers', 'line'],
+ 'lines': [
+ ['self-answered', 'self answered', 'incremental'],
+ ['rule-nxdomain', 'nxdomain', 'incremental', -1],
+ ['rule-refused', 'refused', 'incremental', -1],
+ ['trunc-failures', 'trunc failures', 'incremental', -1]
+ ]},
+ 'backend_responses': {
+ 'options': [None, 'Backend responses', 'responses/s', 'backends', 'dnsdist.backend_responses', 'line'],
+ 'lines': [
+ ['responses', 'responses', 'incremental']
+ ]},
+ 'backend_commerrors': {
+ 'options': [None, 'Backend Communication Errors', 'errors/s', 'backends', 'dnsdist.backend_commerrors', 'line'],
+ 'lines': [
+ ['downstream-send-errors', 'send errors', 'incremental']
+ ]},
+ 'backend_errors': {
+ 'options': [None, 'Backend error responses', 'responses/s', 'backends', 'dnsdist.backend_errors', 'line'],
+ 'lines': [
+ ['downstream-timeouts', 'timeout', 'incremental'],
+ ['servfail-responses', 'servfail', 'incremental'],
+ ['noncompliant-responses', 'non compliant', 'incremental']
+ ]},
+ 'cache': {
+ 'options': [None, 'Cache performance', 'answers/s', 'cache', 'dnsdist.cache', 'area'],
+ 'lines': [
+ ['cache-hits', 'hits', 'incremental'],
+ ['cache-misses', 'misses', 'incremental', -1]
+ ]},
+ 'servercpu': {
+ 'options': [None, 'DNSDIST server CPU utilization', 'ms/s', 'server', 'dnsdist.servercpu', 'stacked'],
+ 'lines': [
+ ['cpu-sys-msec', 'system state', 'incremental'],
+ ['cpu-user-msec', 'user state', 'incremental']
+ ]},
+ 'servermem': {
+ 'options': [None, 'DNSDIST server memory utilization', 'MB', 'server', 'dnsdist.servermem', 'area'],
+ 'lines': [
+ ['real-memory-usage', 'memory usage', 'absolute', 1, 1048576]
+ ]},
+ 'query_latency': {
+ 'options': [None, 'Query latency', 'queries/s', 'latency', 'dnsdist.query_latency', 'stacked'],
+ 'lines': [
+ ['latency0-1', '1ms', 'incremental'],
+ ['latency1-10', '10ms', 'incremental'],
+ ['latency10-50', '50ms', 'incremental'],
+ ['latency50-100', '100ms', 'incremental'],
+ ['latency100-1000', '1sec', 'incremental'],
+ ['latency-slow', 'slow', 'incremental']
+ ]},
+ 'query_latency_avg': {
+ 'options': [None, 'Average latency for the last N queries', 'ms/query', 'latency', 'dnsdist.query_latency_avg', 'line'],
+ 'lines': [
+ ['latency-avg100', '100', 'absolute'],
+ ['latency-avg1000', '1k', 'absolute'],
+ ['latency-avg10000', '10k', 'absolute'],
+ ['latency-avg1000000', '1000k', 'absolute']
+ ]}
+}
+
+class Service(UrlService):
+ def __init__(self, configuration=None, name=None):
+ UrlService.__init__(self, configuration=configuration, name=name)
+ self.order = ORDER
+ self.definitions = CHARTS
+
+ def _get_data(self):
+ data = self._get_raw_data()
+ if not data:
+ return None
+
+ return loads(data)
+
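Editor's note: the new dnsdist module returns the decoded JSON object unchanged, so the top-level statistic names in the payload must match the first element of each 'lines' entry above. A sketch with a hypothetical, abridged payload (a real server exposes many more counters):

from json import loads

raw = '{"queries": 1024, "rdqueries": 900, "empty-queries": 3, "acl-drops": 0}'
data = loads(raw)

# the framework looks up each configured dimension by its id
for dim in ('queries', 'rdqueries', 'empty-queries'):
    print(dim, data.get(dim))
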
diff --git a/python.d/dovecot.chart.py b/python.d/dovecot.chart.py
index b2bef495..5689f2ec 100644
--- a/python.d/dovecot.chart.py
+++ b/python.d/dovecot.chart.py
@@ -2,7 +2,7 @@
# Description: dovecot netdata python.d module
# Author: Pawel Krupa (paulfantom)
-from base import SocketService
+from bases.FrameworkServices.SocketService import SocketService
# default module values (can be overridden per job in `config`)
# update_every = 2
@@ -117,19 +117,10 @@ class Service(SocketService):
data = raw.split('\n')[:2]
desc = data[0].split('\t')
vals = data[1].split('\t')
- # ret = dict(zip(desc, vals))
- ret = {}
- for i in range(len(desc)):
+ ret = dict()
+ for i, _ in enumerate(desc):
try:
- #d = str(desc[i])
- #if d in ('user_cpu', 'sys_cpu', 'clock_time'):
- # val = float(vals[i])
- #else:
- # val = int(vals[i])
- #ret[d] = val
ret[str(desc[i])] = int(vals[i])
except ValueError:
- pass
- if len(ret) == 0:
- return None
- return ret
+ continue
+ return ret or None
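
Editor's note: the dovecot stats socket answers with two tab-separated rows, a header row and a value row; the tightened loop above zips them into a dict and now skips non-integer fields explicitly instead of silently passing. A runnable sketch of the same logic with illustrative sample input:

raw = "num_logins\tnum_cmds\tuser_cpu\n42\t7\t1.23"

data = raw.split('\n')[:2]
desc = data[0].split('\t')
vals = data[1].split('\t')

ret = dict()
for i, _ in enumerate(desc):
    try:
        ret[str(desc[i])] = int(vals[i])
    except ValueError:
        continue  # non-integer fields such as 'user_cpu' are skipped

print(ret or None)  # {'num_logins': 42, 'num_cmds': 7}
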
diff --git a/python.d/elasticsearch.chart.py b/python.d/elasticsearch.chart.py
index 2e0f18c0..afdf0f1b 100644
--- a/python.d/elasticsearch.chart.py
+++ b/python.d/elasticsearch.chart.py
@@ -11,10 +11,9 @@ try:
except ImportError:
from Queue import Queue
-from base import UrlService
+from bases.FrameworkServices.UrlService import UrlService
# default module values (can be overridden per job in `config`)
-# update_every = 2
update_every = 5
priority = 60000
retries = 60
diff --git a/python.d/example.chart.py b/python.d/example.chart.py
index adf97a92..ee7ff62f 100644
--- a/python.d/example.chart.py
+++ b/python.d/example.chart.py
@@ -2,35 +2,46 @@
# Description: example netdata python.d module
# Author: Pawel Krupa (paulfantom)
-import os
-import random
-from base import SimpleService
+from random import SystemRandom
-NAME = os.path.basename(__file__).replace(".chart.py", "")
+from bases.FrameworkServices.SimpleService import SimpleService
# default module values
# update_every = 4
priority = 90000
retries = 60
+ORDER = ['random']
+CHARTS = {
+ 'random': {
+ 'options': [None, 'A random number', 'random number', 'random', 'random', 'line'],
+ 'lines': [
+ ['random1']
+ ]
+ }
+}
+
class Service(SimpleService):
def __init__(self, configuration=None, name=None):
- super(self.__class__,self).__init__(configuration=configuration, name=name)
+ SimpleService.__init__(self, configuration=configuration, name=name)
+ self.order = ORDER
+ self.definitions = CHARTS
+ self.random = SystemRandom()
- def check(self):
- return True
-
- def create(self):
- self.chart("example.python_random", '', 'A random number', 'random number',
- 'random', 'random', 'line', self.priority, self.update_every)
- self.dimension('random1')
- self.commit()
- return True
-
- def update(self, interval):
- self.begin("example.python_random", interval)
- self.set("random1", random.randint(0, 100))
- self.end()
- self.commit()
+ @staticmethod
+ def check():
return True
+
+ def get_data(self):
+ data = dict()
+
+ for i in range(1, 4):
+ dimension_id = ''.join(['random', str(i)])
+
+ if dimension_id not in self.charts['random']:
+ self.charts['random'].add_dimension([dimension_id])
+
+ data[dimension_id] = self.random.randint(0, 100)
+
+ return data
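
Editor's note: the rewritten example leans on the new charts framework: get_data() may add dimensions while the job is running, and the membership test keeps the call idempotent. A generic sketch of that pattern, with a plain dict standing in for the framework's charts object:

from random import SystemRandom

charts = {'random': set()}
rng = SystemRandom()

def get_data():
    data = dict()
    for i in range(1, 4):
        dimension_id = 'random{0}'.format(i)
        if dimension_id not in charts['random']:
            charts['random'].add(dimension_id)  # framework: add_dimension([id])
        data[dimension_id] = rng.randint(0, 100)
    return data

print(get_data())  # e.g. {'random1': 41, 'random2': 7, 'random3': 99}
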
diff --git a/python.d/exim.chart.py b/python.d/exim.chart.py
index 1858cbc7..2e5b924b 100644
--- a/python.d/exim.chart.py
+++ b/python.d/exim.chart.py
@@ -2,7 +2,7 @@
# Description: exim netdata python.d module
# Author: Pawel Krupa (paulfantom)
-from base import ExecutableService
+from bases.FrameworkServices.ExecutableService import ExecutableService
# default module values (can be overridden per job in `config`)
# update_every = 2
diff --git a/python.d/fail2ban.chart.py b/python.d/fail2ban.chart.py
index 5238fa16..895833f8 100644
--- a/python.d/fail2ban.chart.py
+++ b/python.d/fail2ban.chart.py
@@ -2,12 +2,15 @@
# Description: fail2ban log netdata python.d module
# Author: l2isbad
+import bisect
+
+from glob import glob
from re import compile as r_compile
from os import access as is_accessible, R_OK
from os.path import isdir, getsize
-from glob import glob
-import bisect
-from base import LogService
+
+
+from bases.FrameworkServices.LogService import LogService
priority = 60000
retries = 60
@@ -180,8 +183,8 @@ def find_jails_in_files(list_of_files, print_error):
jails_list = list()
for conf in list_of_files:
if is_accessible(conf, R_OK):
- with open(conf, 'rt') as conf:
- raw_data = conf.readlines()
+ with open(conf, 'rt') as f:
+ raw_data = f.readlines()
data = ' '.join(line for line in raw_data if line.startswith(('[', 'enabled')))
jails_list.extend(REGEX_JAILS.findall(data))
else:
diff --git a/python.d/freeradius.chart.py b/python.d/freeradius.chart.py
index f3de1573..3acc58d1 100644
--- a/python.d/freeradius.chart.py
+++ b/python.d/freeradius.chart.py
@@ -2,15 +2,19 @@
# Description: freeradius netdata python.d module
# Author: l2isbad
-from base import SimpleService
from re import findall
from subprocess import Popen, PIPE
+from bases.collection import find_binary
+from bases.FrameworkServices.SimpleService import SimpleService
+
# default module values (can be overridden per job in `config`)
priority = 60000
retries = 60
update_every = 15
+RADIUS_MSG = 'Message-Authenticator = 0x00, FreeRADIUS-Statistics-Type = 15, Response-Packet-Type = Access-Accept'
+
# charts order (can be overridden if you want less charts, or different order)
ORDER = ['authentication', 'accounting', 'proxy-auth', 'proxy-acct']
@@ -18,34 +22,46 @@ CHARTS = {
'authentication': {
'options': [None, "Authentication", "packets/s", 'Authentication', 'freerad.auth', 'line'],
'lines': [
- ['access-accepts', None, 'incremental'], ['access-rejects', None, 'incremental'],
- ['auth-dropped-requests', None, 'incremental'], ['auth-duplicate-requests', None, 'incremental'],
- ['auth-invalid-requests', None, 'incremental'], ['auth-malformed-requests', None, 'incremental'],
- ['auth-unknown-types', None, 'incremental']
- ]},
- 'accounting': {
+ ['access-accepts', None, 'incremental'],
+ ['access-rejects', None, 'incremental'],
+ ['auth-dropped-requests', 'dropped-requests', 'incremental'],
+ ['auth-duplicate-requests', 'duplicate-requests', 'incremental'],
+ ['auth-invalid-requests', 'invalid-requests', 'incremental'],
+ ['auth-malformed-requests', 'malformed-requests', 'incremental'],
+ ['auth-unknown-types', 'unknown-types', 'incremental']
+ ]},
+ 'accounting': {
'options': [None, "Accounting", "packets/s", 'Accounting', 'freerad.acct', 'line'],
'lines': [
- ['accounting-requests', None, 'incremental'], ['accounting-responses', None, 'incremental'],
- ['acct-dropped-requests', None, 'incremental'], ['acct-duplicate-requests', None, 'incremental'],
- ['acct-invalid-requests', None, 'incremental'], ['acct-malformed-requests', None, 'incremental'],
- ['acct-unknown-types', None, 'incremental']
+ ['accounting-requests', 'requests', 'incremental'],
+ ['accounting-responses', 'responses', 'incremental'],
+ ['acct-dropped-requests', 'dropped-requests', 'incremental'],
+ ['acct-duplicate-requests', 'duplicate-requests', 'incremental'],
+ ['acct-invalid-requests', 'invalid-requests', 'incremental'],
+ ['acct-malformed-requests', 'malformed-requests', 'incremental'],
+ ['acct-unknown-types', 'unknown-types', 'incremental']
]},
'proxy-auth': {
'options': [None, "Proxy Authentication", "packets/s", 'Authentication', 'freerad.proxy.auth', 'line'],
'lines': [
- ['proxy-access-accepts', None, 'incremental'], ['proxy-access-rejects', None, 'incremental'],
- ['proxy-auth-dropped-requests', None, 'incremental'], ['proxy-auth-duplicate-requests', None, 'incremental'],
- ['proxy-auth-invalid-requests', None, 'incremental'], ['proxy-auth-malformed-requests', None, 'incremental'],
- ['proxy-auth-unknown-types', None, 'incremental']
+ ['proxy-access-accepts', 'access-accepts', 'incremental'],
+ ['proxy-access-rejects', 'access-rejects', 'incremental'],
+ ['proxy-auth-dropped-requests', 'dropped-requests', 'incremental'],
+ ['proxy-auth-duplicate-requests', 'duplicate-requests', 'incremental'],
+ ['proxy-auth-invalid-requests', 'invalid-requests', 'incremental'],
+ ['proxy-auth-malformed-requests', 'malformed-requests', 'incremental'],
+ ['proxy-auth-unknown-types', 'unknown-types', 'incremental']
]},
- 'proxy-acct': {
+ 'proxy-acct': {
'options': [None, "Proxy Accounting", "packets/s", 'Accounting', 'freerad.proxy.acct', 'line'],
'lines': [
- ['proxy-accounting-requests', None, 'incremental'], ['proxy-accounting-responses', None, 'incremental'],
- ['proxy-acct-dropped-requests', None, 'incremental'], ['proxy-acct-duplicate-requests', None, 'incremental'],
- ['proxy-acct-invalid-requests', None, 'incremental'], ['proxy-acct-malformed-requests', None, 'incremental'],
- ['proxy-acct-unknown-types', None, 'incremental']
+ ['proxy-accounting-requests', 'requests', 'incremental'],
+ ['proxy-accounting-responses', 'responses', 'incremental'],
+ ['proxy-acct-dropped-requests', 'dropped-requests', 'incremental'],
+ ['proxy-acct-duplicate-requests', 'duplicate-requests', 'incremental'],
+ ['proxy-acct-invalid-requests', 'invalid-requests', 'incremental'],
+ ['proxy-acct-malformed-requests', 'malformed-requests', 'incremental'],
+ ['proxy-acct-unknown-types', 'unknown-types', 'incremental']
]}
}
@@ -54,31 +70,33 @@ CHARTS = {
class Service(SimpleService):
def __init__(self, configuration=None, name=None):
SimpleService.__init__(self, configuration=configuration, name=name)
+ self.definitions = CHARTS
self.host = self.configuration.get('host', 'localhost')
self.port = self.configuration.get('port', '18121')
- self.secret = self.configuration.get('secret', 'adminsecret')
+ self.secret = self.configuration.get('secret')
self.acct = self.configuration.get('acct', False)
self.proxy_auth = self.configuration.get('proxy_auth', False)
self.proxy_acct = self.configuration.get('proxy_acct', False)
- self.echo = self.find_binary('echo')
- self.radclient = self.find_binary('radclient')
- self.sub_echo = [self.echo, 'Message-Authenticator = 0x00, FreeRADIUS-Statistics-Type = 15, Response-Packet-Type = Access-Accept']
- self.sub_radclient = [self.radclient, '-r', '1', '-t', '1', ':'.join([self.host, self.port]), 'status', self.secret]
-
+ chart_choice = [True, bool(self.acct), bool(self.proxy_auth), bool(self.proxy_acct)]
+ self.order = [chart for chart, choice in zip(ORDER, chart_choice) if choice]
+ self.echo = find_binary('echo')
+ self.radclient = find_binary('radclient')
+ self.sub_echo = [self.echo, RADIUS_MSG]
+ self.sub_radclient = [self.radclient, '-r', '1', '-t', '1', '-x',
+ ':'.join([self.host, self.port]), 'status', self.secret]
+
def check(self):
if not all([self.echo, self.radclient]):
- self.error('Can\'t locate \'radclient\' binary or binary is not executable by netdata')
+ self.error('Can\'t locate "radclient" binary or binary is not executable by netdata')
return False
+ if not self.secret:
+ self.error('"secret" not set')
+ return None
+
if self._get_raw_data():
- chart_choice = [True, bool(self.acct), bool(self.proxy_auth), bool(self.proxy_acct)]
- self.order = [chart for chart, choice in zip(ORDER, chart_choice) if choice]
- self.definitions = dict([chart for chart in CHARTS.items() if chart[0] in self.order])
- self.info('Plugin was started succesfully')
return True
- else:
- self.error('Request returned no data. Is server alive? Used options: host {0}, port {1}, secret {2}'.format(self.host, self.port, self.secret))
- return False
-
+ self.error('Request returned no data. Is server alive?')
+ return False
def _get_data(self):
"""
@@ -91,7 +109,8 @@ class Service(SimpleService):
def _get_raw_data(self):
"""
The following code is equivalent to
- 'echo "Message-Authenticator = 0x00, FreeRADIUS-Statistics-Type = 15, Response-Packet-Type = Access-Accept" | radclient -t 1 -r 1 host:port status secret'
+ 'echo "Message-Authenticator = 0x00, FreeRADIUS-Statistics-Type = 15, Response-Packet-Type = Access-Accept"
+ | radclient -t 1 -r 1 host:port status secret'
:return: str
"""
try:
@@ -99,10 +118,8 @@ class Service(SimpleService):
process_rad = Popen(self.sub_radclient, stdin=process_echo.stdout, stdout=PIPE, stderr=PIPE, shell=False)
process_echo.stdout.close()
raw_result = process_rad.communicate()[0]
- except Exception:
+ except OSError:
return None
- else:
- if process_rad.returncode is 0:
- return raw_result.decode()
- else:
- return None
+        if process_rad.returncode == 0:
+ return raw_result.decode()
+ return None
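
Editor's note: the raw collection is a two-process pipeline equivalent to the shell command in the docstring; closing the first process's stdout in the parent lets echo receive SIGPIPE if the consumer exits early. A standalone sketch in which 'cat' stands in for radclient, so the example runs without a RADIUS server:

from subprocess import Popen, PIPE

msg = 'Message-Authenticator = 0x00, FreeRADIUS-Statistics-Type = 15'

process_echo = Popen(['echo', msg], stdout=PIPE, stderr=PIPE, shell=False)
process_rad = Popen(['cat'], stdin=process_echo.stdout,
                    stdout=PIPE, stderr=PIPE, shell=False)
process_echo.stdout.close()
raw_result = process_rad.communicate()[0]

if process_rad.returncode == 0:
    print(raw_result.decode())
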
diff --git a/python.d/go_expvar.chart.py b/python.d/go_expvar.chart.py
index e1a334cc..cbd46257 100644
--- a/python.d/go_expvar.chart.py
+++ b/python.d/go_expvar.chart.py
@@ -3,9 +3,10 @@
# Author: Jan Kral (kralewitz)
from __future__ import division
-from base import UrlService
import json
+from bases.FrameworkServices.UrlService import UrlService
+
# default module values (can be overridden per job in `config`)
# update_every = 2
priority = 60000
@@ -14,44 +15,52 @@ retries = 60
MEMSTATS_CHARTS = {
'memstats_heap': {
- 'options': ['heap', 'memory: size of heap memory structures', 'kB', 'memstats', 'expvar.memstats.heap', 'line'],
+ 'options': ['heap', 'memory: size of heap memory structures', 'kB', 'memstats',
+ 'expvar.memstats.heap', 'line'],
'lines': [
['memstats_heap_alloc', 'alloc', 'absolute', 1, 1024],
['memstats_heap_inuse', 'inuse', 'absolute', 1, 1024]
]},
'memstats_stack': {
- 'options': ['stack', 'memory: size of stack memory structures', 'kB', 'memstats', 'expvar.memstats.stack', 'line'],
+ 'options': ['stack', 'memory: size of stack memory structures', 'kB', 'memstats',
+ 'expvar.memstats.stack', 'line'],
'lines': [
['memstats_stack_inuse', 'inuse', 'absolute', 1, 1024]
]},
'memstats_mspan': {
- 'options': ['mspan', 'memory: size of mspan memory structures', 'kB', 'memstats', 'expvar.memstats.mspan', 'line'],
+ 'options': ['mspan', 'memory: size of mspan memory structures', 'kB', 'memstats',
+ 'expvar.memstats.mspan', 'line'],
'lines': [
['memstats_mspan_inuse', 'inuse', 'absolute', 1, 1024]
]},
'memstats_mcache': {
- 'options': ['mcache', 'memory: size of mcache memory structures', 'kB', 'memstats', 'expvar.memstats.mcache', 'line'],
+ 'options': ['mcache', 'memory: size of mcache memory structures', 'kB', 'memstats',
+ 'expvar.memstats.mcache', 'line'],
'lines': [
['memstats_mcache_inuse', 'inuse', 'absolute', 1, 1024]
]},
'memstats_live_objects': {
- 'options': ['live_objects', 'memory: number of live objects', 'objects', 'memstats', 'expvar.memstats.live_objects', 'line'],
+ 'options': ['live_objects', 'memory: number of live objects', 'objects', 'memstats',
+ 'expvar.memstats.live_objects', 'line'],
'lines': [
['memstats_live_objects', 'live']
]},
'memstats_sys': {
- 'options': ['sys', 'memory: size of reserved virtual address space', 'kB', 'memstats', 'expvar.memstats.sys', 'line'],
+ 'options': ['sys', 'memory: size of reserved virtual address space', 'kB', 'memstats',
+ 'expvar.memstats.sys', 'line'],
'lines': [
['memstats_sys', 'sys', 'absolute', 1, 1024]
]},
'memstats_gc_pauses': {
- 'options': ['gc_pauses', 'memory: average duration of GC pauses', 'ns', 'memstats', 'expvar.memstats.gc_pauses', 'line'],
+ 'options': ['gc_pauses', 'memory: average duration of GC pauses', 'ns', 'memstats',
+ 'expvar.memstats.gc_pauses', 'line'],
'lines': [
['memstats_gc_pauses', 'avg']
]},
}
-MEMSTATS_ORDER = ['memstats_heap', 'memstats_stack', 'memstats_mspan', 'memstats_mcache', 'memstats_sys', 'memstats_live_objects', 'memstats_gc_pauses']
+MEMSTATS_ORDER = ['memstats_heap', 'memstats_stack', 'memstats_mspan', 'memstats_mcache',
+ 'memstats_sys', 'memstats_live_objects', 'memstats_gc_pauses']
def flatten(d, top='', sep='.'):
@@ -71,8 +80,8 @@ class Service(UrlService):
# if memstats collection is enabled, add the charts and their order
if self.configuration.get('collect_memstats'):
- self.definitions = MEMSTATS_CHARTS
- self.order = MEMSTATS_ORDER
+ self.definitions = dict(MEMSTATS_CHARTS)
+ self.order = list(MEMSTATS_ORDER)
else:
self.definitions = dict()
self.order = list()
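
Editor's note: the module flattens the nested expvar JSON so that dotted key paths can serve as flat metric ids. A generic sketch of the kind of flatten() helper the signature above suggests (the real implementation may differ):

def flatten(d, top='', sep='.'):
    items = {}
    for key, value in d.items():
        nkey = top + sep + key if top else key
        if isinstance(value, dict):
            items.update(flatten(value, top=nkey, sep=sep))
        else:
            items[nkey] = value
    return items

print(flatten({'memstats': {'heap': {'alloc': 1024}}}))
# -> {'memstats.heap.alloc': 1024}
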
diff --git a/python.d/haproxy.chart.py b/python.d/haproxy.chart.py
index a9ee6665..e72698d1 100644
--- a/python.d/haproxy.chart.py
+++ b/python.d/haproxy.chart.py
@@ -10,7 +10,9 @@ try:
except ImportError:
from urllib.parse import urlparse
-from base import UrlService, SocketService
+from bases.FrameworkServices.SocketService import SocketService
+from bases.FrameworkServices.UrlService import UrlService
+
# default module values (can be overridden per job in `config`)
# update_every = 2
diff --git a/python.d/hddtemp.chart.py b/python.d/hddtemp.chart.py
index 8a98995b..577cab09 100644
--- a/python.d/hddtemp.chart.py
+++ b/python.d/hddtemp.chart.py
@@ -1,10 +1,12 @@
# -*- coding: utf-8 -*-
# Description: hddtemp netdata python.d module
# Author: Pawel Krupa (paulfantom)
-# Modified by l2isbad
+
import os
-from base import SocketService
+from copy import deepcopy
+
+from bases.FrameworkServices.SocketService import SocketService
# default module values (can be overridden per job in `config`)
#update_every = 2
@@ -22,34 +24,39 @@ retries = 60
ORDER = ['temperatures']
+CHARTS = {
+ 'temperatures': {
+ 'options': ['disks_temp', 'Disks Temperatures', 'Celsius', 'temperatures', 'hddtemp.temperatures', 'line'],
+ 'lines': [
+ # lines are created dynamically in `check()` method
+ ]}}
+
+
class Service(SocketService):
def __init__(self, configuration=None, name=None):
SocketService.__init__(self, configuration=configuration, name=name)
+ self.order = ORDER
+ self.definitions = deepcopy(CHARTS)
self._keep_alive = False
self.request = ""
self.host = "127.0.0.1"
self.port = 7634
- self.order = ORDER
- self.fahrenheit = ('Fahrenheit', lambda x: x * 9 / 5 + 32) if self.configuration.get('fahrenheit') else False
- self.whatever = ('Whatever', lambda x: x * 33 / 22 + 11) if self.configuration.get('whatever') else False
- self.choice = (choice for choice in [self.fahrenheit, self.whatever] if choice)
- self.calc = lambda x: x
- self.disks = []
+ self.disks = list()
- def _get_disks(self):
+ def get_disks(self):
try:
disks = self.configuration['devices']
- self.info("Using configured disks" + str(disks))
- except (KeyError, TypeError) as e:
+ self.info("Using configured disks {0}".format(disks))
+ except (KeyError, TypeError):
self.info("Autodetecting disks")
return ["/dev/" + f for f in os.listdir("/dev") if len(f) == 3 and f.startswith("sd")]
- ret = []
+ ret = list()
for disk in disks:
if not disk.startswith('/dev/'):
disk = "/dev/" + disk
ret.append(disk)
- if len(ret) == 0:
+ if not ret:
self.error("Provided disks cannot be found in /dev directory.")
return ret
@@ -59,10 +66,9 @@ class Service(SocketService):
if all(disk in data for disk in self.disks):
return True
-
return False
- def _get_data(self):
+ def get_data(self):
"""
Get data from TCP/IP socket
:return: dict
@@ -72,21 +78,20 @@ class Service(SocketService):
except AttributeError:
self.error("no data received")
return None
- data = {}
+ data = dict()
for i in range(len(raw) // 5):
if not raw[i*5+1] in self.disks:
continue
try:
- val = self.calc(int(raw[i*5+3]))
+ val = int(raw[i*5+3])
except ValueError:
val = 0
data[raw[i*5+1].replace("/dev/", "")] = val
- if len(data) == 0:
+ if not data:
self.error("received data doesn't have needed records")
return None
- else:
- return data
+ return data
def check(self):
"""
@@ -94,27 +99,12 @@ class Service(SocketService):
:return: boolean
"""
self._parse_config()
- self.disks = self._get_disks()
+ self.disks = self.get_disks()
- data = self._get_data()
+ data = self.get_data()
if data is None:
return False
- self.definitions = {
- 'temperatures': {
- 'options': ['disks_temp', 'Disks Temperatures', 'temperatures', 'hddtemp.temperatures', 'line'],
- 'lines': [
- # lines are created dynamically in `check()` method
- ]}
- }
- try:
- self.choice = next(self.choice)
- except StopIteration:
- self.definitions[ORDER[0]]['options'].insert(2, 'Celsius')
- else:
- self.calc = self.choice[1]
- self.definitions[ORDER[0]]['options'].insert(2, self.choice[0])
-
for name in data:
- self.definitions[ORDER[0]]['lines'].append([name])
+ self.definitions['temperatures']['lines'].append([name])
return True
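
Editor's note: hddtemp answers over TCP with one '|'-delimited string; after splitting, each disk occupies five consecutive fields (separator, device, model, temperature, unit), which is why the reader indexes in strides of five. A runnable sketch with an illustrative response:

raw = "|/dev/sda|Disk Model A|35|C||/dev/sdb|Disk Model B|41|C|".split("|")
disks = ['/dev/sda', '/dev/sdb']

data = dict()
for i in range(len(raw) // 5):
    if raw[i * 5 + 1] not in disks:
        continue
    try:
        val = int(raw[i * 5 + 3])
    except ValueError:
        val = 0  # non-numeric temperatures (e.g. a sleeping disk) fall back to 0
    data[raw[i * 5 + 1].replace("/dev/", "")] = val

print(data)  # {'sda': 35, 'sdb': 41}
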
diff --git a/python.d/ipfs.chart.py b/python.d/ipfs.chart.py
index eaea3c5e..43500dfb 100644
--- a/python.d/ipfs.chart.py
+++ b/python.d/ipfs.chart.py
@@ -2,9 +2,10 @@
# Description: IPFS netdata python.d module
# Authors: Pawel Krupa (paulfantom), davidak
-from base import UrlService
import json
+from bases.FrameworkServices.UrlService import UrlService
+
# default module values (can be overridden per job in `config`)
# update_every = 2
priority = 60000
@@ -49,75 +50,75 @@ CHARTS = {
}
SI_zeroes = {'k': 3, 'm': 6, 'g': 9, 't': 12,
- 'p': 15, 'e': 18, 'z': 21, 'y': 24 }
+ 'p': 15, 'e': 18, 'z': 21, 'y': 24}
class Service(UrlService):
def __init__(self, configuration=None, name=None):
UrlService.__init__(self, configuration=configuration, name=name)
- try:
- self.baseurl = str(self.configuration['url'])
- except (KeyError, TypeError):
- self.baseurl = "http://localhost:5001"
+ self.baseurl = self.configuration.get('url', 'http://localhost:5001')
self.order = ORDER
self.definitions = CHARTS
- self.__storagemax = None
+ self.__storage_max = None
- def _get_json(self, suburl):
+ def _get_json(self, sub_url):
"""
:return: json decoding of the specified url
"""
- self.url = self.baseurl + suburl
+ self.url = self.baseurl + sub_url
try:
return json.loads(self._get_raw_data())
- except:
- return {}
+ except (TypeError, ValueError):
+ return dict()
- def _recursive_pins(self, keys):
+ @staticmethod
+ def _recursive_pins(keys):
return len([k for k in keys if keys[k]["Type"] == b"recursive"])
- def _dehumanize(self, storemax):
+ @staticmethod
+ def _dehumanize(store_max):
# convert from '10Gb' to 10000000000
- if type(storemax) != int:
- storemax = storemax.lower()
- if storemax.endswith('b'):
- val, units = storemax[:-2], storemax[-2]
+ if not isinstance(store_max, int):
+ store_max = store_max.lower()
+ if store_max.endswith('b'):
+ val, units = store_max[:-2], store_max[-2]
if units in SI_zeroes:
val += '0'*SI_zeroes[units]
- storemax = val
+ store_max = val
try:
- storemax = int(storemax)
- except:
- storemax = None
- return storemax
+ store_max = int(store_max)
+ except (TypeError, ValueError):
+ store_max = None
+ return store_max
- def _storagemax(self, storecfg):
- if self.__storagemax is None:
- self.__storagemax = self._dehumanize(storecfg['StorageMax'])
- return self.__storagemax
+ def _storagemax(self, store_cfg):
+ if self.__storage_max is None:
+ self.__storage_max = self._dehumanize(store_cfg['StorageMax'])
+ return self.__storage_max
def _get_data(self):
"""
Get data from API
:return: dict
"""
- cfg = { # suburl : List of (result-key, original-key, transform-func)
- '/api/v0/stats/bw' :[('in', 'RateIn', int ),
- ('out', 'RateOut', int )],
- '/api/v0/swarm/peers':[('peers', 'Strings', len )],
- '/api/v0/stats/repo' :[('size', 'RepoSize', int),
- ('objects', 'NumObjects', int)],
- '/api/v0/pin/ls': [('pinned', 'Keys', len),
- ('recursive_pins', 'Keys', self._recursive_pins)],
- '/api/v0/config/show': [('avail', 'Datastore', self._storagemax)]
+ # suburl : List of (result-key, original-key, transform-func)
+ cfg = {
+ '/api/v0/stats/bw':
+ [('in', 'RateIn', int), ('out', 'RateOut', int)],
+ '/api/v0/swarm/peers':
+ [('peers', 'Strings', len)],
+ '/api/v0/stats/repo':
+ [('size', 'RepoSize', int), ('objects', 'NumObjects', int)],
+ '/api/v0/pin/ls':
+ [('pinned', 'Keys', len), ('recursive_pins', 'Keys', self._recursive_pins)],
+ '/api/v0/config/show': [('avail', 'Datastore', self._storagemax)]
}
- r = {}
+ r = dict()
for suburl in cfg:
- json = self._get_json(suburl)
- for newkey, origkey, xmute in cfg[suburl]:
+ in_json = self._get_json(suburl)
+ for new_key, orig_key, xmute in cfg[suburl]:
try:
- r[newkey] = xmute(json[origkey])
- except: pass
+ r[new_key] = xmute(in_json[orig_key])
+ except Exception:
+ continue
return r or None
-
-
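
Editor's note: _dehumanize() converts a human-readable StorageMax such as '10GB' into a byte count by padding the numeric part with the right number of zeroes. A standalone sketch of the same logic:

SI_zeroes = {'k': 3, 'm': 6, 'g': 9, 't': 12,
             'p': 15, 'e': 18, 'z': 21, 'y': 24}

def dehumanize(store_max):
    if not isinstance(store_max, int):
        store_max = store_max.lower()
        if store_max.endswith('b'):
            val, units = store_max[:-2], store_max[-2]
            if units in SI_zeroes:
                val += '0' * SI_zeroes[units]
            store_max = val
        try:
            store_max = int(store_max)
        except (TypeError, ValueError):
            store_max = None
    return store_max

print(dehumanize('10gb'))  # 10000000000
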
diff --git a/python.d/isc_dhcpd.chart.py b/python.d/isc_dhcpd.chart.py
index a437f803..60995342 100644
--- a/python.d/isc_dhcpd.chart.py
+++ b/python.d/isc_dhcpd.chart.py
@@ -7,14 +7,15 @@ from os import stat, access, R_OK
from os.path import isfile
try:
from ipaddress import ip_network, ip_address
- have_ipaddress = True
+ HAVE_IPADDRESS = True
except ImportError:
- have_ipaddress = False
+ HAVE_IPADDRESS = False
try:
from itertools import filterfalse
except ImportError:
from itertools import ifilterfalse as filterfalse
-from base import SimpleService
+
+from bases.FrameworkServices.SimpleService import SimpleService
priority = 60000
retries = 60
@@ -55,11 +56,10 @@ class Service(SimpleService):
# Will work only with 'default' db-time-format (weekday year/month/day hour:minute:second)
# TODO: update algorithm to parse correctly 'local' db-time-format
- # (epoch <seconds-since-epoch>; # <day-name> <month-name> <day-number> <hours>:<minutes>:<seconds> <year>)
# Also only ipv4 supported
def check(self):
- if not have_ipaddress:
+ if not HAVE_IPADDRESS:
self.error('\'python-ipaddress\' module is needed')
return False
if not (isfile(self.leases_path) and access(self.leases_path, R_OK)):
@@ -146,7 +146,16 @@ class Service(SimpleService):
def binding_active(lease_end_time, current_time):
- return mktime(strptime(lease_end_time, '%w %Y/%m/%d %H:%M:%S')) - current_time > 0
+ # lease_end_time might be epoch
+ if lease_end_time.startswith('epoch'):
+        epoch = int(lease_end_time.split()[1].replace(';', ''))
+        return epoch - current_time > 0
+    # a max-int lease-time makes the lease expire in year 2038;
+    # dhcpd writes 'never' into the 'ends' field of such an active lease
+ elif lease_end_time == 'never':
+ return True
+ else:
+ return mktime(strptime(lease_end_time, '%w %Y/%m/%d %H:%M:%S')) - current_time > 0
def find_lease(value):
@@ -155,4 +164,3 @@ def find_lease(value):
def find_ends(value):
return value[2:6] != 'ends'
-
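
Editor's note: binding_active() now understands all three end-time spellings that can appear in dhcpd.leases: the default 'weekday year/month/day H:M:S' timestamp, 'epoch <seconds>;' from the local db-time-format, and the literal 'never' that dhcpd writes for leases pinned at the maximum lease-time. A standalone sketch:

from time import mktime, strptime, time

def binding_active(lease_end_time, current_time):
    if lease_end_time.startswith('epoch'):
        epoch = int(lease_end_time.split()[1].replace(';', ''))
        return epoch - current_time > 0
    if lease_end_time == 'never':
        return True
    return mktime(strptime(lease_end_time, '%w %Y/%m/%d %H:%M:%S')) - current_time > 0

now = time()
print(binding_active('never', now))              # True
print(binding_active('epoch 2000000000;', now))  # True until May 2033
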
diff --git a/python.d/mdstat.chart.py b/python.d/mdstat.chart.py
index 7ce7b193..794d2564 100644
--- a/python.d/mdstat.chart.py
+++ b/python.d/mdstat.chart.py
@@ -2,9 +2,10 @@
# Description: mdstat netdata python.d module
# Author: l2isbad
-from re import compile as re_compile
from collections import defaultdict
-from base import SimpleService
+from re import compile as re_compile
+
+from bases.FrameworkServices.SimpleService import SimpleService
priority = 60000
retries = 60
@@ -18,7 +19,7 @@ class Service(SimpleService):
SimpleService.__init__(self, configuration=configuration, name=name)
self.regex = dict(disks=re_compile(r' (?P<array>[a-zA-Z_0-9]+) : active .+\['
r'(?P<total_disks>[0-9]+)/'
- r'(?P<inuse_disks>[0-9])\]'),
+ r'(?P<inuse_disks>[0-9]+)\]'),
status=re_compile(r' (?P<array>[a-zA-Z_0-9]+) : active .+ '
r'(?P<operation>[a-z]+) =[ ]{1,2}'
r'(?P<operation_status>[0-9.]+).+finish='
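
Editor's note: the disks regex fix ('[0-9]' to '[0-9]+' for inuse_disks) matters for arrays with ten or more members, where the in-use count has two digits. A quick check of the tightened pattern against an illustrative /proc/mdstat line:

from re import compile as re_compile

regex = re_compile(r' (?P<array>[a-zA-Z_0-9]+) : active .+\['
                   r'(?P<total_disks>[0-9]+)/'
                   r'(?P<inuse_disks>[0-9]+)\]')

line = ' md0 : active raid6 sda[0] sdb[1] sdc[2] [12/11]'
match = regex.search(line)
print(match.group('array'), match.group('total_disks'), match.group('inuse_disks'))
# -> md0 12 11
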
diff --git a/python.d/memcached.chart.py b/python.d/memcached.chart.py
index 0d6807ba..4f7adfa2 100644
--- a/python.d/memcached.chart.py
+++ b/python.d/memcached.chart.py
@@ -2,7 +2,7 @@
# Description: memcached netdata python.d module
# Author: Pawel Krupa (paulfantom)
-from base import SocketService
+from bases.FrameworkServices.SocketService import SocketService
# default module values (can be overridden per job in `config`)
#update_every = 2
@@ -149,9 +149,8 @@ class Service(SocketService):
data[t[0]] = t[1]
except (IndexError, ValueError):
self.debug("invalid line received: " + str(line))
- pass
- if len(data) == 0:
+ if not data:
self.error("received data doesn't have any records")
return None
@@ -159,7 +158,7 @@ class Service(SocketService):
try:
data['avail'] = int(data['limit_maxbytes']) - int(data['bytes'])
data['used'] = int(data['bytes'])
- except:
+ except (KeyError, ValueError, TypeError):
pass
return data
@@ -178,11 +177,7 @@ class Service(SocketService):
:return: boolean
"""
self._parse_config()
- if self.name == "":
- self.name = "local"
- self.chart_name += "_" + self.name
data = self._get_data()
if data is None:
return False
-
return True
diff --git a/python.d/mongodb.chart.py b/python.d/mongodb.chart.py
index bb4c44b0..909a419d 100644
--- a/python.d/mongodb.chart.py
+++ b/python.d/mongodb.chart.py
@@ -2,7 +2,6 @@
# Description: mongodb netdata python.d module
# Author: l2isbad
-from base import SimpleService
from copy import deepcopy
from datetime import datetime
from sys import exc_info
@@ -14,6 +13,8 @@ try:
except ImportError:
PYMONGO = False
+from bases.FrameworkServices.SimpleService import SimpleService
+
# default module values (can be overridden per job in `config`)
# update_every = 2
priority = 60000
@@ -36,6 +37,7 @@ REPL_SET_STATES = [
def multiply_by_100(value):
return value * 100
+
DEFAULT_METRICS = [
('opcounters.delete', None, None),
('opcounters.update', None, None),
diff --git a/python.d/mysql.chart.py b/python.d/mysql.chart.py
index 6118f79f..4c7058b2 100644
--- a/python.d/mysql.chart.py
+++ b/python.d/mysql.chart.py
@@ -2,11 +2,11 @@
# Description: MySQL netdata python.d module
# Author: Pawel Krupa (paulfantom)
-from base import MySQLService
+from bases.FrameworkServices.MySQLService import MySQLService
# default module values (can be overridden per job in `config`)
# update_every = 3
-priority = 90000
+priority = 60000
retries = 60
# query executed on MySQL server
@@ -114,7 +114,16 @@ GLOBAL_STATS = [
'Connection_errors_max_connections',
'Connection_errors_peer_address',
'Connection_errors_select',
- 'Connection_errors_tcpwrap']
+ 'Connection_errors_tcpwrap',
+ 'wsrep_local_recv_queue',
+ 'wsrep_local_send_queue',
+ 'wsrep_received',
+ 'wsrep_replicated',
+ 'wsrep_received_bytes',
+ 'wsrep_replicated_bytes',
+ 'wsrep_local_bf_aborts',
+ 'wsrep_local_cert_failures',
+ 'wsrep_flow_control_paused_ns']
def slave_seconds(value):
try:
@@ -122,6 +131,7 @@ def slave_seconds(value):
except (TypeError, ValueError):
return -1
+
def slave_running(value):
return 1 if value == 'Yes' else -1
@@ -146,7 +156,8 @@ ORDER = ['net',
'innodb_buffer_pool_read_ahead', 'innodb_buffer_pool_reqs', 'innodb_buffer_pool_ops',
'qcache_ops', 'qcache', 'qcache_freemem', 'qcache_memblocks',
'key_blocks', 'key_requests', 'key_disk_ops',
- 'files', 'files_rate', 'slave_behind', 'slave_status']
+ 'files', 'files_rate', 'slave_behind', 'slave_status',
+ 'galera_writesets', 'galera_bytes', 'galera_queue', 'galera_conflicts', 'galera_flow_control']
CHARTS = {
'net': {
@@ -248,7 +259,8 @@ CHARTS = {
['Innodb_data_fsyncs', 'fsyncs', 'incremental']
]},
'innodb_io_pending_ops': {
- 'options': [None, 'mysql InnoDB Pending I/O Operations', 'operations', 'innodb', 'mysql.innodb_io_pending_ops', 'line'],
+ 'options': [None, 'mysql InnoDB Pending I/O Operations', 'operations', 'innodb',
+ 'mysql.innodb_io_pending_ops', 'line'],
'lines': [
['Innodb_data_pending_reads', 'reads', 'absolute'],
['Innodb_data_pending_writes', 'writes', 'absolute', -1, 1],
@@ -274,7 +286,8 @@ CHARTS = {
['Innodb_os_log_written', 'write', 'incremental', -1, 1024],
]},
'innodb_cur_row_lock': {
- 'options': [None, 'mysql InnoDB Current Row Locks', 'operations', 'innodb', 'mysql.innodb_cur_row_lock', 'area'],
+ 'options': [None, 'mysql InnoDB Current Row Locks', 'operations', 'innodb',
+ 'mysql.innodb_cur_row_lock', 'area'],
'lines': [
['Innodb_row_lock_current_waits', 'current_waits', 'absolute']
]},
@@ -287,7 +300,8 @@ CHARTS = {
['Innodb_rows_deleted', 'deleted', 'incremental', -1, 1],
]},
'innodb_buffer_pool_pages': {
- 'options': [None, 'mysql InnoDB Buffer Pool Pages', 'pages', 'innodb', 'mysql.innodb_buffer_pool_pages', 'line'],
+ 'options': [None, 'mysql InnoDB Buffer Pool Pages', 'pages', 'innodb',
+ 'mysql.innodb_buffer_pool_pages', 'line'],
'lines': [
['Innodb_buffer_pool_pages_data', 'data', 'absolute'],
['Innodb_buffer_pool_pages_dirty', 'dirty', 'absolute', -1, 1],
@@ -303,20 +317,23 @@ CHARTS = {
['Innodb_buffer_pool_bytes_dirty', 'dirty', 'absolute', -1, 1024 * 1024]
]},
'innodb_buffer_pool_read_ahead': {
- 'options': [None, 'mysql InnoDB Buffer Pool Read Ahead', 'operations/s', 'innodb', 'mysql.innodb_buffer_pool_read_ahead', 'area'],
+ 'options': [None, 'mysql InnoDB Buffer Pool Read Ahead', 'operations/s', 'innodb',
+ 'mysql.innodb_buffer_pool_read_ahead', 'area'],
'lines': [
['Innodb_buffer_pool_read_ahead', 'all', 'incremental'],
['Innodb_buffer_pool_read_ahead_evicted', 'evicted', 'incremental', -1, 1],
['Innodb_buffer_pool_read_ahead_rnd', 'random', 'incremental']
]},
'innodb_buffer_pool_reqs': {
- 'options': [None, 'mysql InnoDB Buffer Pool Requests', 'requests/s', 'innodb', 'mysql.innodb_buffer_pool_reqs', 'area'],
+ 'options': [None, 'mysql InnoDB Buffer Pool Requests', 'requests/s', 'innodb',
+ 'mysql.innodb_buffer_pool_reqs', 'area'],
'lines': [
['Innodb_buffer_pool_read_requests', 'reads', 'incremental'],
['Innodb_buffer_pool_write_requests', 'writes', 'incremental', -1, 1]
]},
'innodb_buffer_pool_ops': {
- 'options': [None, 'mysql InnoDB Buffer Pool Operations', 'operations/s', 'innodb', 'mysql.innodb_buffer_pool_ops', 'area'],
+ 'options': [None, 'mysql InnoDB Buffer Pool Operations', 'operations/s', 'innodb',
+ 'mysql.innodb_buffer_pool_ops', 'area'],
'lines': [
['Innodb_buffer_pool_reads', 'disk reads', 'incremental'],
['Innodb_buffer_pool_wait_free', 'wait free', 'incremental', -1, 1]
@@ -359,7 +376,8 @@ CHARTS = {
['Key_write_requests', 'writes', 'incremental', -1, 1]
]},
'key_disk_ops': {
- 'options': [None, 'mysql MyISAM Key Cache Disk Operations', 'operations/s', 'myisam', 'mysql.key_disk_ops', 'area'],
+ 'options': [None, 'mysql MyISAM Key Cache Disk Operations', 'operations/s',
+ 'myisam', 'mysql.key_disk_ops', 'area'],
'lines': [
['Key_reads', 'reads', 'incremental'],
['Key_writes', 'writes', 'incremental', -1, 1]
@@ -375,13 +393,15 @@ CHARTS = {
['Opened_files', 'files', 'incremental']
]},
'binlog_stmt_cache': {
- 'options': [None, 'mysql Binlog Statement Cache', 'statements/s', 'binlog', 'mysql.binlog_stmt_cache', 'line'],
+ 'options': [None, 'mysql Binlog Statement Cache', 'statements/s', 'binlog',
+ 'mysql.binlog_stmt_cache', 'line'],
'lines': [
['Binlog_stmt_cache_disk_use', 'disk', 'incremental'],
['Binlog_stmt_cache_use', 'all', 'incremental']
]},
'connection_errors': {
- 'options': [None, 'mysql Connection Errors', 'connections/s', 'connections', 'mysql.connection_errors', 'line'],
+ 'options': [None, 'mysql Connection Errors', 'connections/s', 'connections',
+ 'mysql.connection_errors', 'line'],
'lines': [
['Connection_errors_accept', 'accept', 'incremental'],
['Connection_errors_internal', 'internal', 'incremental'],
@@ -400,6 +420,35 @@ CHARTS = {
'lines': [
['Slave_SQL_Running', 'sql_running', 'absolute'],
['Slave_IO_Running', 'io_running', 'absolute']
+ ]},
+ 'galera_writesets': {
+ 'options': [None, 'Replicated writesets', 'writesets/s', 'galera', 'mysql.galera_writesets', 'line'],
+ 'lines': [
+ ['wsrep_received', 'rx', 'incremental'],
+ ['wsrep_replicated', 'tx', 'incremental', -1, 1],
+ ]},
+ 'galera_bytes': {
+ 'options': [None, 'Replicated bytes', 'KB/s', 'galera', 'mysql.galera_bytes', 'area'],
+ 'lines': [
+ ['wsrep_received_bytes', 'rx', 'incremental', 1, 1024],
+ ['wsrep_replicated_bytes', 'tx', 'incremental', -1, 1024],
+ ]},
+ 'galera_queue': {
+ 'options': [None, 'Galera queue', 'writesets', 'galera', 'mysql.galera_queue', 'line'],
+ 'lines': [
+ ['wsrep_local_recv_queue', 'rx', 'absolute'],
+ ['wsrep_local_send_queue', 'tx', 'absolute', -1, 1],
+ ]},
+ 'galera_conflicts': {
+ 'options': [None, 'Replication conflicts', 'transactions', 'galera', 'mysql.galera_conflicts', 'area'],
+ 'lines': [
+ ['wsrep_local_bf_aborts', 'bf_aborts', 'incremental'],
+ ['wsrep_local_cert_failures', 'cert_fails', 'incremental', -1, 1],
+ ]},
+ 'galera_flow_control': {
+ 'options': [None, 'Flow control', 'millisec', 'galera', 'mysql.galera_flow_control', 'area'],
+ 'lines': [
+ ['wsrep_flow_control_paused_ns', 'paused', 'incremental', 1, 1000000],
]}
}
@@ -416,7 +465,7 @@ class Service(MySQLService):
raw_data = self._get_raw_data(description=True)
if not raw_data:
- return None
+ return None
to_netdata = dict()
@@ -426,14 +475,15 @@ class Service(MySQLService):
if key in global_status:
to_netdata[key] = global_status[key]
if 'Threads_created' in to_netdata and 'Connections' in to_netdata:
- to_netdata['Thread_cache_misses'] = round(int(to_netdata['Threads_created']) / float(to_netdata['Connections']) * 10000)
+ to_netdata['Thread_cache_misses'] = round(int(to_netdata['Threads_created'])
+ / float(to_netdata['Connections']) * 10000)
if 'slave_status' in raw_data:
if raw_data['slave_status'][0]:
slave_raw_data = dict(zip([e[0] for e in raw_data['slave_status'][1]], raw_data['slave_status'][0][0]))
- for key, function in SLAVE_STATS:
+ for key, func in SLAVE_STATS:
if key in slave_raw_data:
- to_netdata[key] = function(slave_raw_data[key])
+ to_netdata[key] = func(slave_raw_data[key])
else:
self.queries.pop('slave_status')
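
Editor's note: Thread_cache_misses above is the ratio of threads created to connections, scaled by 10000 so that two decimal places survive integer transport (presumably divided back down for display). A worked example:

threads_created = 37
connections = 5000

thread_cache_misses = round(threads_created / float(connections) * 10000)
print(thread_cache_misses)  # 74, i.e. 0.74%
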
diff --git a/python.d/nginx.chart.py b/python.d/nginx.chart.py
index 88849a92..2e4f0d1b 100644
--- a/python.d/nginx.chart.py
+++ b/python.d/nginx.chart.py
@@ -2,7 +2,7 @@
# Description: nginx netdata python.d module
# Author: Pawel Krupa (paulfantom)
-from base import UrlService
+from bases.FrameworkServices.UrlService import UrlService
# default module values (can be overridden per job in `config`)
# update_every = 2
@@ -22,7 +22,8 @@ ORDER = ['connections', 'requests', 'connection_status', 'connect_rate']
CHARTS = {
'connections': {
- 'options': [None, 'nginx Active Connections', 'connections', 'active connections', 'nginx.connections', 'line'],
+ 'options': [None, 'nginx Active Connections', 'connections', 'active connections',
+ 'nginx.connections', 'line'],
'lines': [
["active"]
]},
@@ -32,14 +33,16 @@ CHARTS = {
["requests", None, 'incremental']
]},
'connection_status': {
- 'options': [None, 'nginx Active Connections by Status', 'connections', 'status', 'nginx.connection_status', 'line'],
+ 'options': [None, 'nginx Active Connections by Status', 'connections', 'status',
+ 'nginx.connection_status', 'line'],
'lines': [
["reading"],
["writing"],
["waiting", "idle"]
]},
'connect_rate': {
- 'options': [None, 'nginx Connections Rate', 'connections/s', 'connections rate', 'nginx.connect_rate', 'line'],
+ 'options': [None, 'nginx Connections Rate', 'connections/s', 'connections rate',
+ 'nginx.connect_rate', 'line'],
'lines': [
["accepts", "accepted", "incremental"],
["handled", None, "incremental"]
@@ -50,8 +53,7 @@ CHARTS = {
class Service(UrlService):
def __init__(self, configuration=None, name=None):
UrlService.__init__(self, configuration=configuration, name=name)
- if len(self.url) == 0:
- self.url = "http://localhost/stub_status"
+ self.url = self.configuration.get('url', 'http://localhost/stub_status')
self.order = ORDER
self.definitions = CHARTS
diff --git a/python.d/nsd.chart.py b/python.d/nsd.chart.py
index 68bb4f23..499dfda2 100644
--- a/python.d/nsd.chart.py
+++ b/python.d/nsd.chart.py
@@ -2,10 +2,10 @@
# Description: NSD `nsd-control stats_noreset` netdata python.d module
# Author: <383c57 at gmail.com>
-
-from base import ExecutableService
import re
+from bases.FrameworkServices.ExecutableService import ExecutableService
+
# default module values (can be overridden per job in `config`)
priority = 60000
retries = 5
diff --git a/python.d/ovpn_status_log.chart.py b/python.d/ovpn_status_log.chart.py
index 3a7e8200..519c77fa 100644
--- a/python.d/ovpn_status_log.chart.py
+++ b/python.d/ovpn_status_log.chart.py
@@ -3,8 +3,8 @@
# Author: l2isbad
from re import compile as r_compile
-from collections import defaultdict
-from base import SimpleService
+
+from bases.FrameworkServices.SimpleService import SimpleService
priority = 60000
retries = 60
@@ -34,24 +34,30 @@ class Service(SimpleService):
self.log_path = self.configuration.get('log_path')
self.regex = dict(tls=r_compile(r'\d{1,3}(?:\.\d{1,3}){3}(?::\d+)? (?P<bytes_in>\d+) (?P<bytes_out>\d+)'),
static_key=r_compile(r'TCP/[A-Z]+ (?P<direction>(?:read|write)) bytes,(?P<bytes>\d+)'))
- self.to_netdata = dict(bytes_in=0, bytes_out=0)
def check(self):
if not (self.log_path and isinstance(self.log_path, str)):
- self.error('\'log_path\' is not defined')
+ self.error("'log_path' is not defined")
return False
- data = False
- for method in (self._get_data_tls, self._get_data_static_key):
- data = method()
- if data:
- self._get_data = method
- self._data_from_check = data
- break
+ data = self._get_raw_data()
+ if not data:
+ self.error('Make sure that the openvpn status log file exists and netdata has permission to read it')
+ return None
- if data:
+ found = None
+ for row in data:
+ if 'ROUTING' in row:
+ self.get_data = self.get_data_tls
+ found = True
+ break
+ elif 'STATISTICS' in row:
+ self.get_data = self.get_data_static_key
+ found = True
+ break
+ if found:
return True
- self.error('Make sure that the openvpn status log file exists and netdata has permission to read it')
+        self.error("Failed to parse openvpn log file")
return False
def _get_raw_data(self):
@@ -68,7 +74,7 @@ class Service(SimpleService):
else:
return raw_data
- def _get_data_static_key(self):
+ def get_data_static_key(self):
"""
Parse openvpn-status log file.
"""
@@ -77,7 +83,7 @@ class Service(SimpleService):
if not raw_data:
return None
- data = defaultdict(lambda: 0)
+ data = dict(bytes_in=0, bytes_out=0)
for row in raw_data:
match = self.regex['static_key'].search(row)
@@ -90,7 +96,7 @@ class Service(SimpleService):
return data or None
- def _get_data_tls(self):
+ def get_data_tls(self):
"""
Parse openvpn-status log file.
"""
@@ -99,7 +105,7 @@ class Service(SimpleService):
if not raw_data:
return None
- data = defaultdict(lambda: 0)
+ data = dict(users=0, bytes_in=0, bytes_out=0)
for row in raw_data:
row = ' '.join(row.split(',')) if ',' in row else ' '.join(row.split())
match = self.regex['tls'].search(row)
@@ -110,4 +116,3 @@ class Service(SimpleService):
data['bytes_out'] += int(match['bytes_out'])
return data or None
-
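
Editor's note: check() now inspects the status log once to pick a parser: a TLS-mode file contains a ROUTING TABLE section, a static-key file a STATISTICS section. A sketch of the detection with illustrative file contents:

status_lines = [
    'OpenVPN CLIENT LIST',
    'ROUTING TABLE',
    'GLOBAL STATS',
]

mode = None
for row in status_lines:
    if 'ROUTING' in row:
        mode = 'tls'
        break
    elif 'STATISTICS' in row:
        mode = 'static_key'
        break

print(mode)  # 'tls'
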
diff --git a/python.d/phpfpm.chart.py b/python.d/phpfpm.chart.py
index 7a983521..ea7a9a7e 100644
--- a/python.d/phpfpm.chart.py
+++ b/python.d/phpfpm.chart.py
@@ -2,10 +2,11 @@
# Description: PHP-FPM netdata python.d module
# Author: Pawel Krupa (paulfantom)
-from base import UrlService
import json
import re
+from bases.FrameworkServices.UrlService import UrlService
+
# default module values (can be overridden per job in `config`)
# update_every = 2
priority = 60000
@@ -40,6 +41,7 @@ PER_PROCESS_INFO = [
def average(collection):
return sum(collection, 0.0) / max(len(collection), 1)
+
CALC = [
('min', min),
('max', max),
@@ -130,8 +132,8 @@ class Service(UrlService):
if p_info:
for new_name in PER_PROCESS_INFO:
- for name, function in CALC:
- to_netdata[name + new_name[1]] = function([p_info[k] for k in p_info if new_name[1] in k])
+ for name, func in CALC:
+ to_netdata[name + new_name[1]] = func([p_info[k] for k in p_info if new_name[1] in k])
return to_netdata or None
@@ -165,4 +167,3 @@ def parse_raw_data_(is_json, regex, raw_data):
else:
raw_data = ' '.join(raw_data.split())
return dict(regex.findall(raw_data))
-
diff --git a/python.d/postfix.chart.py b/python.d/postfix.chart.py
index ee4142aa..a2129e4b 100644
--- a/python.d/postfix.chart.py
+++ b/python.d/postfix.chart.py
@@ -2,7 +2,7 @@
# Description: postfix netdata python.d module
# Author: Pawel Krupa (paulfantom)
-from base import ExecutableService
+from bases.FrameworkServices.ExecutableService import ExecutableService
# default module values (can be overridden per job in `config`)
# update_every = 2
diff --git a/python.d/postgres.chart.py b/python.d/postgres.chart.py
index b17565e9..ef69a9c7 100644
--- a/python.d/postgres.chart.py
+++ b/python.d/postgres.chart.py
@@ -13,11 +13,11 @@ try:
except ImportError:
PSYCOPG2 = False
-from base import SimpleService
+from bases.FrameworkServices.SimpleService import SimpleService
# default module values
update_every = 1
-priority = 90000
+priority = 60000
retries = 60
METRICS = dict(
@@ -62,7 +62,7 @@ SELECT
CAST(COALESCE(SUM(CAST(archive_file ~ $r$\.ready$$r$ as INT)), 0) AS INT) AS ready_count,
CAST(COALESCE(SUM(CAST(archive_file ~ $r$\.done$$r$ AS INT)), 0) AS INT) AS done_count
FROM
- pg_catalog.pg_ls_dir('pg_xlog/archive_status') AS archive_files (archive_file);
+ pg_catalog.pg_ls_dir('{0}/archive_status') AS archive_files (archive_file);
""",
BACKENDS="""
SELECT
@@ -125,7 +125,11 @@ AND NOT datname ~* '^template\d+';
""",
IF_SUPERUSER="""
SELECT current_setting('is_superuser') = 'on' AS is_superuser;
- """)
+ """,
+ DETECT_SERVER_VERSION="""
+SHOW server_version_num;
+ """
+)
QUERY_STATS = {
@@ -221,7 +225,7 @@ CHARTS = {
class Service(SimpleService):
def __init__(self, configuration=None, name=None):
- super(self.__class__, self).__init__(configuration=configuration, name=name)
+ SimpleService.__init__(self, configuration=configuration, name=name)
self.order = ORDER[:]
self.definitions = deepcopy(CHARTS)
self.table_stats = configuration.pop('table_stats', False)
@@ -229,6 +233,7 @@ class Service(SimpleService):
self.database_poll = configuration.pop('database_poll', None)
self.configuration = configuration
self.connection = False
+ self.server_version = None
self.data = dict()
self.locks_zeroed = dict()
self.databases = list()
@@ -257,17 +262,20 @@ class Service(SimpleService):
return False
result, error = self._connect()
if not result:
- conf = dict([(k, (lambda k, v: v if k != 'password' else '*****')(k, v)) for k, v in self.configuration.items()])
+ conf = dict((k, (lambda k, v: v if k != 'password' else '*****')(k, v))
+ for k, v in self.configuration.items())
self.error('Failed to connect to %s. Error: %s' % (str(conf), error))
return False
try:
cursor = self.connection.cursor()
self.databases = discover_databases_(cursor, QUERIES['FIND_DATABASES'])
is_superuser = check_if_superuser_(cursor, QUERIES['IF_SUPERUSER'])
+ self.server_version = detect_server_version(cursor, QUERIES['DETECT_SERVER_VERSION'])
cursor.close()
- if (self.database_poll and isinstance(self.database_poll, str)):
- self.databases = [dbase for dbase in self.databases if dbase in self.database_poll.split()] or self.databases
+ if self.database_poll and isinstance(self.database_poll, str):
+ self.databases = [dbase for dbase in self.databases if dbase in self.database_poll.split()]\
+ or self.databases
self.locks_zeroed = populate_lock_types(self.databases)
self.add_additional_queries_(is_superuser)
@@ -284,7 +292,11 @@ class Service(SimpleService):
self.queries[QUERIES['TABLE_STATS']] = METRICS['TABLE_STATS']
if is_superuser:
self.queries[QUERIES['BGWRITER']] = METRICS['BGWRITER']
- self.queries[QUERIES['ARCHIVE']] = METRICS['ARCHIVE']
+ if self.server_version >= 100000:
+ wal_dir_name = 'pg_wal'
+ else:
+ wal_dir_name = 'pg_xlog'
+ self.queries[QUERIES['ARCHIVE'].format(wal_dir_name)] = METRICS['ARCHIVE']
def create_dynamic_charts_(self):
@@ -340,6 +352,9 @@ def check_if_superuser_(cursor, query):
cursor.execute(query)
return cursor.fetchone()[0]
+def detect_server_version(cursor, query):
+ cursor.execute(query)
+ return int(cursor.fetchone()[0])
def populate_lock_types(databases):
result = dict()
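
Editor's note: DETECT_SERVER_VERSION reads server_version_num (e.g. 90605 for 9.6.5, 100003 for 10.3), which decides the WAL directory name: PostgreSQL 10 renamed pg_xlog to pg_wal, and the ARCHIVE query is formatted accordingly. A sketch of the selection:

def wal_dir_for(server_version_num):
    return 'pg_wal' if server_version_num >= 100000 else 'pg_xlog'

archive_query = "SELECT ... FROM pg_catalog.pg_ls_dir('{0}/archive_status') ..."
print(archive_query.format(wal_dir_for(100003)))  # pg_wal/archive_status
print(archive_query.format(wal_dir_for(90605)))   # pg_xlog/archive_status
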
diff --git a/python.d/powerdns.chart.py b/python.d/powerdns.chart.py
new file mode 100644
index 00000000..a8d2f399
--- /dev/null
+++ b/python.d/powerdns.chart.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+# Description: powerdns netdata python.d module
+# Author: l2isbad
+
+from json import loads
+
+from bases.FrameworkServices.UrlService import UrlService
+
+priority = 60000
+retries = 60
+# update_every = 3
+
+ORDER = ['questions', 'cache_usage', 'cache_size', 'latency']
+CHARTS = {
+ 'questions': {
+ 'options': [None, 'PowerDNS Queries and Answers', 'count', 'questions', 'powerdns.questions', 'line'],
+ 'lines': [
+ ['udp-queries', None, 'incremental'],
+ ['udp-answers', None, 'incremental'],
+ ['tcp-queries', None, 'incremental'],
+ ['tcp-answers', None, 'incremental']
+ ]},
+ 'cache_usage': {
+ 'options': [None, 'PowerDNS Cache Usage', 'count', 'cache', 'powerdns.cache_usage', 'line'],
+ 'lines': [
+ ['query-cache-hit', None, 'incremental'],
+ ['query-cache-miss', None, 'incremental'],
+ ['packetcache-hit', 'packet-cache-hit', 'incremental'],
+ ['packetcache-miss', 'packet-cache-miss', 'incremental']
+ ]},
+ 'cache_size': {
+ 'options': [None, 'PowerDNS Cache Size', 'count', 'cache', 'powerdns.cache_size', 'line'],
+ 'lines': [
+ ['query-cache-size', None, 'absolute'],
+ ['packetcache-size', 'packet-cache-size', 'absolute'],
+ ['key-cache-size', None, 'absolute'],
+ ['meta-cache-size', None, 'absolute']
+ ]},
+ 'latency': {
+ 'options': [None, 'PowerDNS Latency', 'microseconds', 'latency', 'powerdns.latency', 'line'],
+ 'lines': [
+ ['latency', None, 'absolute']
+ ]}
+
+}
+
+
+class Service(UrlService):
+ def __init__(self, configuration=None, name=None):
+ UrlService.__init__(self, configuration=configuration, name=name)
+ self.order = ORDER
+ self.definitions = CHARTS
+
+ def _get_data(self):
+ data = self._get_raw_data()
+ if not data:
+ return None
+ return dict((d['name'], d['value']) for d in loads(data))
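Editor's note: PowerDNS serves its statistics as a JSON array of name/value objects; the module flattens that into a single dict keyed by statistic name. A sketch with an abridged, illustrative payload:

from json import loads

raw = '[{"name": "udp-queries", "value": "12"}, {"name": "latency", "value": "255"}]'
data = dict((d['name'], d['value']) for d in loads(raw))
print(data)  # {'udp-queries': '12', 'latency': '255'}
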
diff --git a/python.d/python_modules/base.py b/python.d/python_modules/base.py
index 1d5417ec..7c6e1d2f 100644
--- a/python.d/python_modules/base.py
+++ b/python.d/python_modules/base.py
@@ -1,1119 +1,9 @@
# -*- coding: utf-8 -*-
-# Description: netdata python modules framework
-# Author: Pawel Krupa (paulfantom)
-
-# Remember:
-# ALL CODE NEEDS TO BE COMPATIBLE WITH Python > 2.7 and Python > 3.1
-# Follow PEP8 as much as it is possible
-# "check" and "create" CANNOT be blocking.
-# "update" CAN be blocking
-# "update" function needs to be fast, so follow:
-# https://wiki.python.org/moin/PythonSpeed/PerformanceTips
-# basically:
-# - use local variables wherever it is possible
-# - avoid dots in expressions that are executed many times
-# - use "join()" instead of "+"
-# - use "import" only at the beginning
-#
-# using ".encode()" in one thread can block other threads as well (only in python2)
-
-import os
-import re
-import socket
-import time
-import threading
-
-import urllib3
-
-from glob import glob
-from subprocess import Popen, PIPE
-from sys import exc_info
-
-try:
- import MySQLdb
- PY_MYSQL = True
-except ImportError:
- try:
- import pymysql as MySQLdb
- PY_MYSQL = True
- except ImportError:
- PY_MYSQL = False
-
-import msg
-
-
-PATH = os.getenv('PATH', '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin').split(':')
-try:
- urllib3.disable_warnings()
-except AttributeError:
- msg.error('urllib3: warnings were not disabled')
-
-
-# class BaseService(threading.Thread):
-class SimpleService(threading.Thread):
- """
- Prototype of Service class.
- Implemented basic functionality to run jobs by `python.d.plugin`
- """
- def __init__(self, configuration=None, name=None):
- """
- This needs to be initialized in child classes
- :param configuration: dict
- :param name: str
- """
- threading.Thread.__init__(self)
- self._data_stream = ""
- self.daemon = True
- self.retries = 0
- self.retries_left = 0
- self.priority = 140000
- self.update_every = 1
- self.name = name
- self.override_name = None
- self.chart_name = ""
- self._dimensions = []
- self._charts = []
- self.__chart_set = False
- self.__first_run = True
- self.order = []
- self.definitions = {}
- self._data_from_check = dict()
- if configuration is None:
- self.error("BaseService: no configuration parameters supplied. Cannot create Service.")
- raise RuntimeError
- else:
- self._extract_base_config(configuration)
- self.timetable = {}
- self.create_timetable()
-
- # --- BASIC SERVICE CONFIGURATION ---
-
- def _extract_base_config(self, config):
- """
- Get basic parameters to run service
- Minimum config:
- config = {'update_every':1,
- 'priority':100000,
- 'retries':0}
- :param config: dict
- """
- pop = config.pop
- try:
- self.override_name = pop('name')
- except KeyError:
- pass
- self.update_every = int(pop('update_every'))
- self.priority = int(pop('priority'))
- self.retries = int(pop('retries'))
- self.retries_left = self.retries
- self.configuration = config
-
- def create_timetable(self, freq=None):
- """
- Create service timetable.
- `freq` is optional
- Example:
- timetable = {'last': 1466370091.3767564,
- 'next': 1466370092,
- 'freq': 1}
- :param freq: int
- """
- if freq is None:
- freq = self.update_every
- now = time.time()
- self.timetable = {'last': now,
- 'next': now - (now % freq) + freq,
- 'freq': freq}
-
- # --- THREAD CONFIGURATION ---
-
- def _run_once(self):
- """
- Executes self.update(interval) and draws run time chart.
- Return value presents exit status of update()
- :return: boolean
- """
- t_start = float(time.time())
- chart_name = self.chart_name
-
- since_last = int((t_start - self.timetable['last']) * 1000000)
- if self.__first_run:
- since_last = 0
-
- if not self.update(since_last):
- self.error("update function failed.")
- return False
-
- # draw performance graph
- run_time = int((time.time() - t_start) * 1000)
- print("BEGIN netdata.plugin_pythond_%s %s\nSET run_time = %s\nEND\n" %
- (self.chart_name, str(since_last), str(run_time)))
-
- self.debug(chart_name, "updated in", str(run_time), "ms")
- self.timetable['last'] = t_start
- self.__first_run = False
- return True
-
- def run(self):
- """
- Runs job in thread. Handles retries.
- Exits when job failed or timed out.
- :return: None
- """
- step = float(self.timetable['freq'])
- penalty = 0
- self.timetable['last'] = float(time.time() - step)
- self.debug("starting data collection - update frequency:", str(step), " retries allowed:", str(self.retries))
- while True: # run forever, unless something is wrong
- now = float(time.time())
- next = self.timetable['next'] = now - (now % step) + step + penalty
-
- # it is important to do this in a loop
- # sleep() is interruptable
- while now < next:
- self.debug("sleeping for", str(next - now), "secs to reach frequency of",
- str(step), "secs, now:", str(now), " next:", str(next), " penalty:", str(penalty))
- time.sleep(next - now)
- now = float(time.time())
-
- # do the job
- try:
- status = self._run_once()
- except Exception:
- status = False
-
- if status:
- # it is good
- self.retries_left = self.retries
- penalty = 0
- else:
- # it failed
- self.retries_left -= 1
- if self.retries_left <= 0:
- if penalty == 0:
- penalty = float(self.retries * step) / 2
- else:
- penalty *= 1.5
-
- if penalty > 600:
- penalty = 600
-
- self.retries_left = self.retries
- self.alert("failed to collect data for " + str(self.retries) +
- " times - increasing penalty to " + str(penalty) + " sec and trying again")
-
- else:
- self.error("failed to collect data - " + str(self.retries_left)
- + " retries left - penalty: " + str(penalty) + " sec")
-
- # --- CHART ---
-
- @staticmethod
- def _format(*args):
- """
- Escape and convert passed arguments.
- :param args: anything
- :return: list
- """
- params = []
- append = params.append
- for p in args:
- if p is None:
- append(p)
- continue
- if type(p) is not str:
- p = str(p)
- if ' ' in p:
- p = "'" + p + "'"
- append(p)
- return params
-
- def _line(self, instruction, *params):
- """
- Converts *params to string and joins them with one space between every one.
- Result is appended to self._data_stream
- :param params: str/int/float
- """
- tmp = list(map((lambda x: "''" if x is None or len(x) == 0 else x), params))
- self._data_stream += "%s %s\n" % (instruction, str(" ".join(tmp)))
-
- def chart(self, type_id, name="", title="", units="", family="",
- category="", chart_type="line", priority="", update_every=""):
- """
- Defines a new chart.
- :param type_id: str
- :param name: str
- :param title: str
- :param units: str
- :param family: str
- :param category: str
- :param chart_type: str
- :param priority: int/str
- :param update_every: int/str
- """
- self._charts.append(type_id)
-
- p = self._format(type_id, name, title, units, family, category, chart_type, priority, update_every)
- self._line("CHART", *p)
-
- def dimension(self, id, name=None, algorithm="absolute", multiplier=1, divisor=1, hidden=False):
- """
- Defines a new dimension for the chart
- :param id: str
- :param name: str
- :param algorithm: str
- :param multiplier: int/str
- :param divisor: int/str
- :param hidden: boolean
- :return:
- """
- try:
- int(multiplier)
- except TypeError:
- self.error("malformed dimension: multiplier is not a number:", multiplier)
- multiplier = 1
- try:
- int(divisor)
- except TypeError:
- self.error("malformed dimension: divisor is not a number:", divisor)
- divisor = 1
- if name is None:
- name = id
- if algorithm not in ("absolute", "incremental", "percentage-of-absolute-row", "percentage-of-incremental-row"):
- algorithm = "absolute"
-
- self._dimensions.append(str(id))
- if hidden:
- p = self._format(id, name, algorithm, multiplier, divisor, "hidden")
- else:
- p = self._format(id, name, algorithm, multiplier, divisor)
-
- self._line("DIMENSION", *p)
-
- def begin(self, type_id, microseconds=0):
- """
- Begin data set
- :param type_id: str
- :param microseconds: int
- :return: boolean
- """
- if type_id not in self._charts:
- self.error("wrong chart type_id:", type_id)
- return False
- try:
- int(microseconds)
- except TypeError:
- self.error("malformed begin statement: microseconds are not a number:", microseconds)
- microseconds = ""
-
- self._line("BEGIN", type_id, str(microseconds))
- return True
-
- def set(self, id, value):
- """
- Set value to dimension
- :param id: str
- :param value: int/float
- :return: boolean
- """
- if id not in self._dimensions:
- self.error("wrong dimension id:", id, "Available dimensions are:", *self._dimensions)
- return False
- try:
- value = str(int(value))
- except TypeError:
- self.error("cannot set non-numeric value:", str(value))
- return False
- self._line("SET", id, "=", str(value))
- self.__chart_set = True
- return True
-
- def end(self):
- if self.__chart_set:
- self._line("END")
- self.__chart_set = False
- else:
- pos = self._data_stream.rfind("BEGIN")
- self._data_stream = self._data_stream[:pos]
-
- def commit(self):
- """
- Upload new data to netdata.
- """
- try:
- print(self._data_stream)
- except Exception as e:
- msg.fatal('cannot send data to netdata:', str(e))
- self._data_stream = ""
-
- # --- ERROR HANDLING ---
-
- def error(self, *params):
- """
- Show error message on stderr
- """
- msg.error(self.chart_name, *params)
-
- def alert(self, *params):
- """
- Show error message on stderr
- """
- msg.alert(self.chart_name, *params)
-
- def debug(self, *params):
- """
- Show debug message on stderr
- """
- msg.debug(self.chart_name, *params)
-
- def info(self, *params):
- """
- Show information message on stderr
- """
- msg.info(self.chart_name, *params)
-
- # --- MAIN METHODS ---
-
- def _get_data(self):
- """
- Get some data
- :return: dict
- """
- return {}
-
- def check(self):
- """
- check() prototype
- :return: boolean
- """
- self.debug("Module", str(self.__module__), "doesn't implement check() function. Using default.")
- data = self._get_data()
-
- if data is None:
- self.debug("failed to receive data during check().")
- return False
-
- if len(data) == 0:
- self.debug("empty data during check().")
- return False
-
- self.debug("successfully received data during check(): '" + str(data) + "'")
- return True
-
- def create(self):
- """
- Create charts
- :return: boolean
- """
- data = self._data_from_check or self._get_data()
- if data is None:
- self.debug("failed to receive data during create().")
- return False
-
- idx = 0
- for name in self.order:
- options = self.definitions[name]['options'] + [self.priority + idx, self.update_every]
- self.chart(self.chart_name + "." + name, *options)
- # check if server has this datapoint
- for line in self.definitions[name]['lines']:
- if line[0] in data:
- self.dimension(*line)
- idx += 1
-
- self.commit()
- return True
-
- def update(self, interval):
- """
- Update charts
- :param interval: int
- :return: boolean
- """
- data = self._get_data()
- if data is None:
- self.debug("failed to receive data during update().")
- return False
-
- updated = False
- for chart in self.order:
- if self.begin(self.chart_name + "." + chart, interval):
- updated = True
- for dim in self.definitions[chart]['lines']:
- try:
- self.set(dim[0], data[dim[0]])
- except KeyError:
- pass
- self.end()
-
- self.commit()
- if not updated:
- self.error("no charts to update")
-
- return updated
-
- @staticmethod
- def find_binary(binary):
- try:
- if isinstance(binary, str):
- binary = os.path.basename(binary)
- return next(('/'.join([p, binary]) for p in PATH
- if os.path.isfile('/'.join([p, binary]))
- and os.access('/'.join([p, binary]), os.X_OK)))
- return None
- except StopIteration:
- return None
-
- def _add_new_dimension(self, dimension_id, chart_name, dimension=None, algorithm='incremental',
- multiplier=1, divisor=1, priority=65000):
- """
- :param dimension_id:
- :param chart_name:
- :param dimension:
- :param algorithm:
- :param multiplier:
- :param divisor:
- :param priority:
- :return:
- """
- if not all([dimension_id not in self._dimensions,
- chart_name in self.order,
- chart_name in self.definitions]):
- return
- self._dimensions.append(dimension_id)
- dimension_list = list(map(str, [dimension_id,
- dimension if dimension else dimension_id,
- algorithm,
- multiplier,
- divisor]))
- self.definitions[chart_name]['lines'].append(dimension_list)
- add_to_name = self.override_name or self.name
- job_name = ('_'.join([self.__module__, re.sub('\s+', '_', add_to_name)])
- if add_to_name != 'None' else self.__module__)
- chart = 'CHART {0}.{1} '.format(job_name, chart_name)
- options = '"" "{0}" {1} "{2}" {3} {4} '.format(*self.definitions[chart_name]['options'][1:6])
- other = '{0} {1}\n'.format(priority, self.update_every)
- new_dimension = "DIMENSION {0}\n".format(' '.join(dimension_list))
- print(chart + options + other + new_dimension)
-
-
-class UrlService(SimpleService):
- def __init__(self, configuration=None, name=None):
- SimpleService.__init__(self, configuration=configuration, name=name)
- self.url = self.configuration.get('url')
- self.user = self.configuration.get('user')
- self.password = self.configuration.get('pass')
- self.proxy_user = self.configuration.get('proxy_user')
- self.proxy_password = self.configuration.get('proxy_pass')
- self.proxy_url = self.configuration.get('proxy_url')
- self._manager = None
-
- def __make_headers(self, **header_kw):
- user = header_kw.get('user') or self.user
- password = header_kw.get('pass') or self.password
- proxy_user = header_kw.get('proxy_user') or self.proxy_user
- proxy_password = header_kw.get('proxy_pass') or self.proxy_password
- header_params = dict(keep_alive=True)
- proxy_header_params = dict()
- if user and password:
- header_params['basic_auth'] = '{user}:{password}'.format(user=user,
- password=password)
- if proxy_user and proxy_password:
- proxy_header_params['proxy_basic_auth'] = '{user}:{password}'.format(user=proxy_user,
- password=proxy_password)
- try:
- return urllib3.make_headers(**header_params), urllib3.make_headers(**proxy_header_params)
- except TypeError as error:
- self.error('build_header() error: {error}'.format(error=error))
- return None, None
-
- def _build_manager(self, **header_kw):
- header, proxy_header = self.__make_headers(**header_kw)
- if header is None or proxy_header is None:
- return None
- proxy_url = header_kw.get('proxy_url') or self.proxy_url
- if proxy_url:
- manager = urllib3.ProxyManager
- params = dict(proxy_url=proxy_url, headers=header, proxy_headers=proxy_header)
- else:
- manager = urllib3.PoolManager
- params = dict(headers=header)
- try:
- return manager(**params)
- except (urllib3.exceptions.ProxySchemeUnknown, TypeError) as error:
- self.error('build_manager() error:', str(error))
- return None
-
- def _get_raw_data(self, url=None, manager=None):
- """
- Get raw data from http request
- :return: str
- """
- try:
- url = url or self.url
- manager = manager or self._manager
- # TODO: timeout, retries and method hardcoded..
- response = manager.request(method='GET',
- url=url,
- timeout=1,
- retries=1,
- headers=manager.headers)
- except (urllib3.exceptions.HTTPError, TypeError, AttributeError) as error:
- self.error('Url: {url}. Error: {error}'.format(url=url, error=error))
- return None
- if response.status == 200:
- return response.data.decode()
- self.debug('Url: {url}. Http response status code: {code}'.format(url=url, code=response.status))
- return None
-
- def check(self):
- """
- Format configuration data and try to connect to server
- :return: boolean
- """
- if not (self.url and isinstance(self.url, str)):
- self.error('URL is not defined or type is not <str>')
- return False
-
- self._manager = self._build_manager()
- if not self._manager:
- return False
-
- try:
- data = self._get_data()
- except Exception as error:
- self.error('_get_data() failed. Url: {url}. Error: {error}'.format(url=self.url, error=error))
- return False
-
- if isinstance(data, dict) and data:
- self._data_from_check = data
- return True
- self.error('_get_data() returned no data or type is not <dict>')
- return False
-
-
-class SocketService(SimpleService):
- def __init__(self, configuration=None, name=None):
- self._sock = None
- self._keep_alive = False
- self.host = "localhost"
- self.port = None
- self.unix_socket = None
- self.request = ""
- self.__socket_config = None
- self.__empty_request = "".encode()
- SimpleService.__init__(self, configuration=configuration, name=name)
-
- def _socketerror(self, message=None):
- if self.unix_socket is not None:
- self.error("unix socket '" + self.unix_socket + "':", message)
- else:
- if self.__socket_config is not None:
- af, socktype, proto, canonname, sa = self.__socket_config
- self.error("socket to '" + str(sa[0]) + "' port " + str(sa[1]) + ":", message)
- else:
- self.error("unknown socket:", message)
-
- def _connect2socket(self, res=None):
- """
- Connect to a socket, passing the result of getaddrinfo()
- :return: boolean
- """
- if res is None:
- res = self.__socket_config
- if res is None:
- self.error("Cannot create socket to 'None':")
- return False
-
- af, socktype, proto, canonname, sa = res
- try:
- self.debug("creating socket to '" + str(sa[0]) + "', port " + str(sa[1]))
- self._sock = socket.socket(af, socktype, proto)
- except socket.error as e:
- self.error("Failed to create socket to '" + str(sa[0]) + "', port " + str(sa[1]) + ":", str(e))
- self._sock = None
- self.__socket_config = None
- return False
-
- try:
- self.debug("connecting socket to '" + str(sa[0]) + "', port " + str(sa[1]))
- self._sock.connect(sa)
- except socket.error as e:
- self.error("Failed to connect to '" + str(sa[0]) + "', port " + str(sa[1]) + ":", str(e))
- self._disconnect()
- self.__socket_config = None
- return False
-
- self.debug("connected to '" + str(sa[0]) + "', port " + str(sa[1]))
- self.__socket_config = res
- return True
-
- def _connect2unixsocket(self):
- """
- Connect to a unix socket, given its filename
- :return: boolean
- """
- if self.unix_socket is None:
- self.error("cannot connect to unix socket 'None'")
- return False
-
- try:
- self.debug("attempting DGRAM unix socket '" + str(self.unix_socket) + "'")
- self._sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
- self._sock.connect(self.unix_socket)
- self.debug("connected DGRAM unix socket '" + str(self.unix_socket) + "'")
- return True
- except socket.error as e:
- self.debug("Failed to connect DGRAM unix socket '" + str(self.unix_socket) + "':", str(e))
-
- try:
- self.debug("attempting STREAM unix socket '" + str(self.unix_socket) + "'")
- self._sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- self._sock.connect(self.unix_socket)
- self.debug("connected STREAM unix socket '" + str(self.unix_socket) + "'")
- return True
- except socket.error as e:
- self.debug("Failed to connect STREAM unix socket '" + str(self.unix_socket) + "':", str(e))
- self.error("Failed to connect to unix socket '" + str(self.unix_socket) + "':", str(e))
- self._sock = None
- return False
-
- def _connect(self):
- """
- Recreate socket and connect to it since sockets cannot be reused after closing
- Available configurations are IPv6, IPv4 or UNIX socket
- :return:
- """
- try:
- if self.unix_socket is not None:
- self._connect2unixsocket()
-
- else:
- if self.__socket_config is not None:
- self._connect2socket()
- else:
- for res in socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM):
- if self._connect2socket(res): break
-
- except Exception as e:
- self._sock = None
- self.__socket_config = None
-
- if self._sock is not None:
- self._sock.setblocking(0)
- self._sock.settimeout(5)
- self.debug("set socket timeout to: " + str(self._sock.gettimeout()))
-
- def _disconnect(self):
- """
- Close socket connection
- :return:
- """
- if self._sock is not None:
- try:
- self.debug("closing socket")
- self._sock.shutdown(2) # 0 - read, 1 - write, 2 - all
- self._sock.close()
- except Exception:
- pass
- self._sock = None
-
- def _send(self):
- """
- Send request.
- :return: boolean
- """
- # Send request if it is needed
- if self.request != self.__empty_request:
- try:
- self.debug("sending request:", str(self.request))
- self._sock.send(self.request)
- except Exception as e:
- self._socketerror("error sending request:" + str(e))
- self._disconnect()
- return False
- return True
-
- def _receive(self):
- """
- Receive data from socket
- :return: str
- """
- data = ""
- while True:
- self.debug("receiving response")
- try:
- buf = self._sock.recv(4096)
- except Exception as e:
- self._socketerror("failed to receive response:" + str(e))
- self._disconnect()
- break
-
- if buf is None or len(buf) == 0: # handle server disconnect
- if data == "":
- self._socketerror("unexpectedly disconnected")
- else:
- self.debug("server closed the connection")
- self._disconnect()
- break
-
- self.debug("received data:", str(buf))
- data += buf.decode('utf-8', 'ignore')
- if self._check_raw_data(data):
- break
-
- self.debug("final response:", str(data))
- return data
-
- def _get_raw_data(self):
- """
- Get raw data with low-level "socket" module.
- :return: str
- """
- if self._sock is None:
- self._connect()
- if self._sock is None:
- return None
-
- # Send request if it is needed
- if not self._send():
- return None
-
- data = self._receive()
-
- if not self._keep_alive:
- self._disconnect()
-
- return data
-
- def _check_raw_data(self, data):
- """
- Check if all data has been gathered from socket
- :param data: str
- :return: boolean
- """
- return True
-
- def _parse_config(self):
- """
- Parse configuration data
- :return: boolean
- """
- if self.name is None or self.name == str(None):
- self.name = ""
- else:
- self.name = str(self.name)
-
- try:
- self.unix_socket = str(self.configuration['socket'])
- except (KeyError, TypeError):
- self.debug("No unix socket specified. Trying TCP/IP socket.")
- self.unix_socket = None
- try:
- self.host = str(self.configuration['host'])
- except (KeyError, TypeError):
- self.debug("No host specified. Using: '" + self.host + "'")
- try:
- self.port = int(self.configuration['port'])
- except (KeyError, TypeError):
- self.debug("No port specified. Using: '" + str(self.port) + "'")
-
- try:
- self.request = str(self.configuration['request'])
- except (KeyError, TypeError):
- self.debug("No request specified. Using: '" + str(self.request) + "'")
-
- self.request = self.request.encode()
-
- def check(self):
- self._parse_config()
- return SimpleService.check(self)
-
-
-class LogService(SimpleService):
- def __init__(self, configuration=None, name=None):
- SimpleService.__init__(self, configuration=configuration, name=name)
- self.log_path = self.configuration.get('path')
- self.__glob_path = self.log_path
- self._last_position = 0
- self.retries = 100000 # basically always retry
- self.__re_find = dict(current=0, run=0, maximum=60)
-
- def _get_raw_data(self):
- """
- Get log lines since last poll
- :return: list
- """
- lines = list()
- try:
- if self.__re_find['current'] == self.__re_find['run']:
- self._find_recent_log_file()
- size = os.path.getsize(self.log_path)
- if size == self._last_position:
- self.__re_find['current'] += 1
- return list() # return empty list if nothing has changed
- elif size < self._last_position:
- self._last_position = 0 # read from beginning if file has shrunk
-
- with open(self.log_path) as fp:
- fp.seek(self._last_position)
- for line in fp:
- lines.append(line)
- self._last_position = fp.tell()
- self.__re_find['current'] = 0
- except (OSError, IOError) as error:
- self.__re_find['current'] += 1
- self.error(str(error))
-
- return lines or None
-
- def _find_recent_log_file(self):
- """
- :return:
- """
- self.__re_find['run'] = self.__re_find['maximum']
- self.__re_find['current'] = 0
- self.__glob_path = self.__glob_path or self.log_path # workaround for modules w/o config files
- path_list = glob(self.__glob_path)
- if path_list:
- self.log_path = max(path_list)
- return True
- return False
-
- def check(self):
- """
- Parse basic configuration and check if log file exists
- :return: boolean
- """
- if not self.log_path:
- self.error("No path to log specified")
- return None
-
- if all([self._find_recent_log_file(),
- os.access(self.log_path, os.R_OK),
- os.path.isfile(self.log_path)]):
- return True
- self.error("Cannot access %s" % self.log_path)
- return False
-
- def create(self):
- # set cursor at last byte of log file
- self._last_position = os.path.getsize(self.log_path)
- status = SimpleService.create(self)
- # self._last_position = 0
- return status
-
-
-class ExecutableService(SimpleService):
-
- def __init__(self, configuration=None, name=None):
- SimpleService.__init__(self, configuration=configuration, name=name)
- self.command = None
-
- def _get_raw_data(self, stderr=False):
- """
- Get raw data from executed command
- :return: <list>
- """
- try:
- p = Popen(self.command, stdout=PIPE, stderr=PIPE)
- except Exception as error:
- self.error("Executing command", " ".join(self.command), "resulted in error:", str(error))
- return None
- data = list()
- std = p.stderr if stderr else p.stdout
- for line in std.readlines():
- data.append(line.decode())
-
- return data or None
-
- def check(self):
- """
- Parse basic configuration, check if command is whitelisted and is returning values
- :return: <boolean>
- """
- # Preference: 1. "command" from configuration file 2. "command" from plugin (if specified)
- if 'command' in self.configuration:
- self.command = self.configuration['command']
-
- # "command" must be: 1.not None 2. type <str>
- if not (self.command and isinstance(self.command, str)):
- self.error('Command is not defined or command type is not <str>')
- return False
-
- # Split "command" into: 1. command <str> 2. options <list>
- command, opts = self.command.split()[0], self.command.split()[1:]
-
- # Check for "bad" symbols in options. No pipes, redirects etc. TODO: what is missing?
- bad_opts = set(''.join(opts)) & set(['&', '|', ';', '>', '<'])
- if bad_opts:
- self.error("Bad command argument(s): %s" % bad_opts)
- return False
-
- # Find absolute path ('echo' => '/bin/echo')
- if '/' not in command:
- command = self.find_binary(command)
- if not command:
- self.error('Can\'t locate "%s" binary in PATH(%s)' % (self.command, PATH))
- return False
- # Check if binary exist and executable
- else:
- if not (os.path.isfile(command) and os.access(command, os.X_OK)):
- self.error('"%s" is not a file or not executable' % command)
- return False
-
- self.command = [command] + opts if opts else [command]
-
- try:
- data = self._get_data()
- except Exception as error:
- self.error('_get_data() failed. Command: %s. Error: %s' % (self.command, error))
- return False
-
- if isinstance(data, dict) and data:
- # We need this for create() method. No reason to execute get_data() again if result is not empty dict()
- self._data_from_check = data
- return True
- else:
- self.error("Command", str(self.command), "returned no data")
- return False
-
-
-class MySQLService(SimpleService):
-
- def __init__(self, configuration=None, name=None):
- SimpleService.__init__(self, configuration=configuration, name=name)
- self.__connection = None
- self.__conn_properties = dict()
- self.extra_conn_properties = dict()
- self.__queries = self.configuration.get('queries', dict())
- self.queries = dict()
-
- def __connect(self):
- try:
- connection = MySQLdb.connect(connect_timeout=self.update_every, **self.__conn_properties)
- except (MySQLdb.MySQLError, TypeError, AttributeError) as error:
- return None, str(error)
- else:
- return connection, None
-
- def check(self):
- def get_connection_properties(conf, extra_conf):
- properties = dict()
- if conf.get('user'):
- properties['user'] = conf['user']
- if conf.get('pass'):
- properties['passwd'] = conf['pass']
- if conf.get('socket'):
- properties['unix_socket'] = conf['socket']
- elif conf.get('host'):
- properties['host'] = conf['host']
- properties['port'] = int(conf.get('port', 3306))
- elif conf.get('my.cnf'):
- if MySQLdb.__name__ == 'pymysql':
- self.error('"my.cnf" parsing is not working for pymysql')
- else:
- properties['read_default_file'] = conf['my.cnf']
- if isinstance(extra_conf, dict) and extra_conf:
- properties.update(extra_conf)
-
- return properties or None
-
- def is_valid_queries_dict(raw_queries, log_error):
- """
- :param raw_queries: dict:
- :param log_error: function:
- :return: dict or None
-
- raw_queries is valid when: type <dict> and not empty after is_valid_query(for all queries)
- """
- def is_valid_query(query):
- return all([isinstance(query, str),
- query.startswith(('SELECT', 'select', 'SHOW', 'show'))])
-
- if hasattr(raw_queries, 'keys') and raw_queries:
- valid_queries = dict([(n, q) for n, q in raw_queries.items() if is_valid_query(q)])
- bad_queries = set(raw_queries) - set(valid_queries)
-
- if bad_queries:
- log_error('Removed query(s): %s' % bad_queries)
- return valid_queries
- else:
- log_error('Unsupported "queries" format. Must be not empty <dict>')
- return None
-
- if not PY_MYSQL:
- self.error('MySQLdb or PyMySQL module is needed to use mysql.chart.py plugin')
- return False
-
- # Preference: 1. "queries" from the configuration file 2. "queries" from the module
- self.queries = self.__queries or self.queries
- # Check if "self.queries" exist, not empty and all queries are in valid format
- self.queries = is_valid_queries_dict(self.queries, self.error)
- if not self.queries:
- return None
-
- # Get connection properties
- self.__conn_properties = get_connection_properties(self.configuration, self.extra_conn_properties)
- if not self.__conn_properties:
- self.error('Connection properties are missing')
- return False
-
- # Create connection to the database
- self.__connection, error = self.__connect()
- if error:
- self.error('Can\'t establish connection to MySQL: %s' % error)
- return False
-
- try:
- data = self._get_data()
- except Exception as error:
- self.error('_get_data() failed. Error: %s' % error)
- return False
-
- if isinstance(data, dict) and data:
- # We need this for create() method
- self._data_from_check = data
- return True
- else:
- self.error("_get_data() returned no data or type is not <dict>")
- return False
-
- def _get_raw_data(self, description=None):
- """
- Get raw data from MySQL server
- :return: dict: fetchall() or (fetchall(), description)
- """
-
- if not self.__connection:
- self.__connection, error = self.__connect()
- if error:
- return None
-
- raw_data = dict()
- queries = dict(self.queries)
- try:
- with self.__connection as cursor:
- for name, query in queries.items():
- try:
- cursor.execute(query)
- except (MySQLdb.ProgrammingError, MySQLdb.OperationalError) as error:
- if self.__is_error_critical(err_class=exc_info()[0], err_text=str(error)):
- raise RuntimeError
- self.error('Removed query: %s[%s]. Error: %s'
- % (name, query, error))
- self.queries.pop(name)
- continue
- else:
- raw_data[name] = (cursor.fetchall(), cursor.description) if description else cursor.fetchall()
- self.__connection.commit()
- except (MySQLdb.MySQLError, RuntimeError, TypeError, AttributeError):
- self.__connection.close()
- self.__connection = None
- return None
- else:
- return raw_data or None
-
- @staticmethod
- def __is_error_critical(err_class, err_text):
- return err_class == MySQLdb.OperationalError and all(['denied' not in err_text,
- 'Unknown column' not in err_text])
+# Description: backward compatibility with old version
+
+from bases.FrameworkServices.SimpleService import SimpleService
+from bases.FrameworkServices.UrlService import UrlService
+from bases.FrameworkServices.SocketService import SocketService
+from bases.FrameworkServices.LogService import LogService
+from bases.FrameworkServices.ExecutableService import ExecutableService
+from bases.FrameworkServices.MySQLService import MySQLService
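With base.py reduced to the re-export shim above, modules written against the old layout keep importing from it unchanged while the real implementations live under bases/FrameworkServices. A hedged sketch of such a legacy-style module (the chart layout and returned values are illustrative only):

    # -*- coding: utf-8 -*-
    # Legacy import path; resolves to bases.FrameworkServices.SimpleService.
    from base import SimpleService

    ORDER = ['random']
    CHARTS = {
        'random': {
            'options': [None, 'Example Chart', 'value', 'example', 'example.random', 'line'],
            'lines': [['value', None, 'absolute']]
        }
    }

    class Service(SimpleService):
        def __init__(self, configuration=None, name=None):
            SimpleService.__init__(self, configuration=configuration, name=name)
            self.order = ORDER
            self.definitions = CHARTS

        def _get_data(self):
            return {'value': 42}  # any non-empty dict keeps the job alive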
diff --git a/python.d/python_modules/bases/FrameworkServices/ExecutableService.py b/python.d/python_modules/bases/FrameworkServices/ExecutableService.py
new file mode 100644
index 00000000..b6d7871f
--- /dev/null
+++ b/python.d/python_modules/bases/FrameworkServices/ExecutableService.py
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+# Description:
+# Author: Pawel Krupa (paulfantom)
+# Author: Ilya Mashchenko (l2isbad)
+
+import os
+
+from subprocess import Popen, PIPE
+
+from bases.FrameworkServices.SimpleService import SimpleService
+from bases.collection import find_binary
+
+
+class ExecutableService(SimpleService):
+ def __init__(self, configuration=None, name=None):
+ SimpleService.__init__(self, configuration=configuration, name=name)
+ self.command = None
+
+ def _get_raw_data(self, stderr=False):
+ """
+ Get raw data from executed command
+ :return: <list>
+ """
+ try:
+ p = Popen(self.command, stdout=PIPE, stderr=PIPE)
+ except Exception as error:
+ self.error('Executing command {command} resulted in error: {error}'.format(command=self.command,
+ error=error))
+ return None
+ data = list()
+ std = p.stderr if stderr else p.stdout
+ for line in std:
+ data.append(line.decode())
+
+ return data or None
+
+ def check(self):
+ """
+        Parse basic configuration, check that the command is whitelisted and returns values
+ :return: <boolean>
+ """
+ # Preference: 1. "command" from configuration file 2. "command" from plugin (if specified)
+ if 'command' in self.configuration:
+ self.command = self.configuration['command']
+
+ # "command" must be: 1.not None 2. type <str>
+ if not (self.command and isinstance(self.command, str)):
+ self.error('Command is not defined or command type is not <str>')
+ return False
+
+ # Split "command" into: 1. command <str> 2. options <list>
+ command, opts = self.command.split()[0], self.command.split()[1:]
+
+ # Check for "bad" symbols in options. No pipes, redirects etc.
+ opts_list = ['&', '|', ';', '>', '<']
+ bad_opts = set(''.join(opts)) & set(opts_list)
+ if bad_opts:
+ self.error("Bad command argument(s): {opts}".format(opts=bad_opts))
+ return False
+
+ # Find absolute path ('echo' => '/bin/echo')
+ if '/' not in command:
+ command = find_binary(command)
+ if not command:
+ self.error('Can\'t locate "{command}" binary'.format(command=self.command))
+ return False
+ # Check if binary exist and executable
+ else:
+ if not os.access(command, os.X_OK):
+ self.error('"{binary}" is not executable'.format(binary=command))
+ return False
+
+ self.command = [command] + opts if opts else [command]
+
+ try:
+ data = self._get_data()
+ except Exception as error:
+ self.error('_get_data() failed. Command: {command}. Error: {error}'.format(command=self.command,
+ error=error))
+ return False
+
+ if isinstance(data, dict) and data:
+ return True
+ self.error('Command "{command}" returned no data'.format(command=self.command))
+ return False
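check() above rejects shell metacharacters before anything reaches Popen (which runs without a shell) and resolves bare command names against PATH through bases.collection.find_binary. A standalone sketch of the same validation, with find_binary re-implemented here purely for illustration:

    import os

    PATH = os.getenv('PATH', '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin').split(':')
    BAD_CHARS = {'&', '|', ';', '>', '<'}

    def find_binary(binary):
        # First executable match on PATH, e.g. 'echo' -> '/bin/echo'.
        for directory in PATH:
            candidate = os.path.join(directory, binary)
            if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
                return candidate
        return None

    def validate(command):
        name, opts = command.split()[0], command.split()[1:]
        if set(''.join(opts)) & BAD_CHARS:
            return None  # pipes/redirects are rejected, as in check()
        resolved = find_binary(name) if '/' not in name else name
        return [resolved] + opts if resolved else None

    print(validate('uptime -p'))                     # e.g. ['/usr/bin/uptime', '-p']
    print(validate('cat /etc/passwd | nc host 80'))  # None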
diff --git a/python.d/python_modules/bases/FrameworkServices/LogService.py b/python.d/python_modules/bases/FrameworkServices/LogService.py
new file mode 100644
index 00000000..45daa244
--- /dev/null
+++ b/python.d/python_modules/bases/FrameworkServices/LogService.py
@@ -0,0 +1,78 @@
+# -*- coding: utf-8 -*-
+# Description:
+# Author: Pawel Krupa (paulfantom)
+
+from glob import glob
+import os
+
+from bases.FrameworkServices.SimpleService import SimpleService
+
+
+class LogService(SimpleService):
+ def __init__(self, configuration=None, name=None):
+ SimpleService.__init__(self, configuration=configuration, name=name)
+ self.log_path = self.configuration.get('path')
+ self.__glob_path = self.log_path
+ self._last_position = 0
+ self.__re_find = dict(current=0, run=0, maximum=60)
+
+ def _get_raw_data(self):
+ """
+ Get log lines since last poll
+ :return: list
+ """
+ lines = list()
+ try:
+ if self.__re_find['current'] == self.__re_find['run']:
+ self._find_recent_log_file()
+ size = os.path.getsize(self.log_path)
+ if size == self._last_position:
+ self.__re_find['current'] += 1
+ return list() # return empty list if nothing has changed
+ elif size < self._last_position:
+ self._last_position = 0 # read from beginning if file has shrunk
+
+ with open(self.log_path) as fp:
+ fp.seek(self._last_position)
+ for line in fp:
+ lines.append(line)
+ self._last_position = fp.tell()
+ self.__re_find['current'] = 0
+ except (OSError, IOError) as error:
+ self.__re_find['current'] += 1
+ self.error(str(error))
+
+ return lines or None
+
+ def _find_recent_log_file(self):
+ """
+ :return:
+ """
+ self.__re_find['run'] = self.__re_find['maximum']
+ self.__re_find['current'] = 0
+ self.__glob_path = self.__glob_path or self.log_path # workaround for modules w/o config files
+ path_list = glob(self.__glob_path)
+ if path_list:
+ self.log_path = max(path_list)
+ return True
+ return False
+
+ def check(self):
+ """
+ Parse basic configuration and check if log file exists
+ :return: boolean
+ """
+ if not self.log_path:
+ self.error('No path to log specified')
+ return None
+
+ if self._find_recent_log_file() and os.access(self.log_path, os.R_OK) and os.path.isfile(self.log_path):
+ return True
+ self.error('Cannot access {0}'.format(self.log_path))
+ return False
+
+ def create(self):
+ # set cursor at last byte of log file
+ self._last_position = os.path.getsize(self.log_path)
+ status = SimpleService.create(self)
+ return status
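_get_raw_data() above is an incremental tail: an unchanged file size means there is nothing to read, a shrunken size is treated as rotation or truncation and reading restarts at byte 0, and after 60 idle polls the glob is re-evaluated to pick up a newer file. The core read loop, extracted as a standalone sketch:

    import os

    def read_new_lines(path, last_position):
        # Incremental read mirroring LogService._get_raw_data().
        size = os.path.getsize(path)
        if size == last_position:
            return [], last_position   # nothing new since last poll
        if size < last_position:
            last_position = 0          # file shrank: assume rotation, reread
        with open(path) as fp:
            fp.seek(last_position)
            lines = fp.readlines()
            return lines, fp.tell()

    # usage against a hypothetical log file:
    # lines, pos = read_new_lines('/var/log/example.log', pos)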
diff --git a/python.d/python_modules/bases/FrameworkServices/MySQLService.py b/python.d/python_modules/bases/FrameworkServices/MySQLService.py
new file mode 100644
index 00000000..3acc5b10
--- /dev/null
+++ b/python.d/python_modules/bases/FrameworkServices/MySQLService.py
@@ -0,0 +1,158 @@
+# -*- coding: utf-8 -*-
+# Description:
+# Author: Ilya Mashchenko (l2isbad)
+
+from sys import exc_info
+
+try:
+ import MySQLdb
+
+ PY_MYSQL = True
+except ImportError:
+ try:
+ import pymysql as MySQLdb
+
+ PY_MYSQL = True
+ except ImportError:
+ PY_MYSQL = False
+
+from bases.FrameworkServices.SimpleService import SimpleService
+
+
+class MySQLService(SimpleService):
+ def __init__(self, configuration=None, name=None):
+ SimpleService.__init__(self, configuration=configuration, name=name)
+ self.__connection = None
+ self.__conn_properties = dict()
+ self.extra_conn_properties = dict()
+ self.__queries = self.configuration.get('queries', dict())
+ self.queries = dict()
+
+ def __connect(self):
+ try:
+ connection = MySQLdb.connect(connect_timeout=self.update_every, **self.__conn_properties)
+ except (MySQLdb.MySQLError, TypeError, AttributeError) as error:
+ return None, str(error)
+ else:
+ return connection, None
+
+ def check(self):
+ def get_connection_properties(conf, extra_conf):
+ properties = dict()
+ if conf.get('user'):
+ properties['user'] = conf['user']
+ if conf.get('pass'):
+ properties['passwd'] = conf['pass']
+ if conf.get('socket'):
+ properties['unix_socket'] = conf['socket']
+ elif conf.get('host'):
+ properties['host'] = conf['host']
+ properties['port'] = int(conf.get('port', 3306))
+ elif conf.get('my.cnf'):
+ if MySQLdb.__name__ == 'pymysql':
+ self.error('"my.cnf" parsing is not working for pymysql')
+ else:
+ properties['read_default_file'] = conf['my.cnf']
+ if isinstance(extra_conf, dict) and extra_conf:
+ properties.update(extra_conf)
+
+ return properties or None
+
+ def is_valid_queries_dict(raw_queries, log_error):
+ """
+ :param raw_queries: dict:
+ :param log_error: function:
+ :return: dict or None
+
+            raw_queries is valid when it is a non-empty <dict> and stays non-empty after filtering with is_valid_query()
+ """
+
+ def is_valid_query(query):
+ return all([isinstance(query, str),
+ query.startswith(('SELECT', 'select', 'SHOW', 'show'))])
+
+ if hasattr(raw_queries, 'keys') and raw_queries:
+ valid_queries = dict([(n, q) for n, q in raw_queries.items() if is_valid_query(q)])
+ bad_queries = set(raw_queries) - set(valid_queries)
+
+ if bad_queries:
+ log_error('Removed query(s): {queries}'.format(queries=bad_queries))
+ return valid_queries
+ else:
+            log_error('Unsupported "queries" format. Must be a non-empty <dict>')
+ return None
+
+ if not PY_MYSQL:
+ self.error('MySQLdb or PyMySQL module is needed to use mysql.chart.py plugin')
+ return False
+
+ # Preference: 1. "queries" from the configuration file 2. "queries" from the module
+ self.queries = self.__queries or self.queries
+ # Check if "self.queries" exist, not empty and all queries are in valid format
+ self.queries = is_valid_queries_dict(self.queries, self.error)
+ if not self.queries:
+ return None
+
+ # Get connection properties
+ self.__conn_properties = get_connection_properties(self.configuration, self.extra_conn_properties)
+ if not self.__conn_properties:
+ self.error('Connection properties are missing')
+ return False
+
+ # Create connection to the database
+ self.__connection, error = self.__connect()
+ if error:
+ self.error('Can\'t establish connection to MySQL: {error}'.format(error=error))
+ return False
+
+ try:
+ data = self._get_data()
+ except Exception as error:
+ self.error('_get_data() failed. Error: {error}'.format(error=error))
+ return False
+
+ if isinstance(data, dict) and data:
+ return True
+ self.error("_get_data() returned no data or type is not <dict>")
+ return False
+
+ def _get_raw_data(self, description=None):
+ """
+ Get raw data from MySQL server
+ :return: dict: fetchall() or (fetchall(), description)
+ """
+
+ if not self.__connection:
+ self.__connection, error = self.__connect()
+ if error:
+ return None
+
+ raw_data = dict()
+ queries = dict(self.queries)
+ try:
+ with self.__connection as cursor:
+ for name, query in queries.items():
+ try:
+ cursor.execute(query)
+ except (MySQLdb.ProgrammingError, MySQLdb.OperationalError) as error:
+ if self.__is_error_critical(err_class=exc_info()[0], err_text=str(error)):
+ raise RuntimeError
+                        self.error('Removed query: {name}[{query}]. Error: {error}'.format(name=name,
+                                                                                             query=query,
+                                                                                             error=error))
+ self.queries.pop(name)
+ continue
+ else:
+ raw_data[name] = (cursor.fetchall(), cursor.description) if description else cursor.fetchall()
+ self.__connection.commit()
+ except (MySQLdb.MySQLError, RuntimeError, TypeError, AttributeError):
+ self.__connection.close()
+ self.__connection = None
+ return None
+ else:
+ return raw_data or None
+
+ @staticmethod
+ def __is_error_critical(err_class, err_text):
+ return err_class == MySQLdb.OperationalError and all(['denied' not in err_text,
+ 'Unknown column' not in err_text])
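A subclass (or the user's job configuration) hands MySQLService a dict of named queries, and check() above keeps only read-only SELECT/SHOW statements. The filter in isolation, with illustrative query names:

    QUERIES = {
        'global_status': 'SHOW GLOBAL STATUS',
        'slave_status': 'SHOW SLAVE STATUS',
        'rogue': 'DROP TABLE users',  # rejected: not SELECT/SHOW
    }

    def is_valid_query(query):
        return isinstance(query, str) and query.startswith(('SELECT', 'select', 'SHOW', 'show'))

    valid = dict((n, q) for n, q in QUERIES.items() if is_valid_query(q))
    print(sorted(valid))  # ['global_status', 'slave_status']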
diff --git a/python.d/python_modules/bases/FrameworkServices/SimpleService.py b/python.d/python_modules/bases/FrameworkServices/SimpleService.py
new file mode 100644
index 00000000..14c83910
--- /dev/null
+++ b/python.d/python_modules/bases/FrameworkServices/SimpleService.py
@@ -0,0 +1,252 @@
+# -*- coding: utf-8 -*-
+# Description:
+# Author: Pawel Krupa (paulfantom)
+# Author: Ilya Mashchenko (l2isbad)
+
+from threading import Thread
+
+try:
+ from time import sleep, monotonic as time
+except ImportError:
+ from time import sleep, time
+
+from bases.charts import Charts, ChartError, create_runtime_chart
+from bases.collection import OldVersionCompatibility, safe_print
+from bases.loggers import PythonDLimitedLogger
+
+RUNTIME_CHART_UPDATE = 'BEGIN netdata.runtime_{job_name} {since_last}\n' \
+ 'SET run_time = {elapsed}\n' \
+ 'END\n'
+
+
+class RuntimeCounters:
+ def __init__(self, configuration):
+ """
+ :param configuration: <dict>
+ """
+ self.FREQ = int(configuration.pop('update_every'))
+ self.START_RUN = 0
+ self.NEXT_RUN = 0
+ self.PREV_UPDATE = 0
+ self.SINCE_UPDATE = 0
+ self.ELAPSED = 0
+ self.RETRIES = 0
+ self.RETRIES_MAX = configuration.pop('retries')
+ self.PENALTY = 0
+
+ def is_sleep_time(self):
+ return self.START_RUN < self.NEXT_RUN
+
+
+class SimpleService(Thread, PythonDLimitedLogger, OldVersionCompatibility, object):
+ """
+ Prototype of Service class.
+ Implemented basic functionality to run jobs by `python.d.plugin`
+ """
+ def __init__(self, configuration, name=''):
+ """
+ :param configuration: <dict>
+ :param name: <str>
+ """
+ Thread.__init__(self)
+ self.daemon = True
+ PythonDLimitedLogger.__init__(self)
+ OldVersionCompatibility.__init__(self)
+ self.configuration = configuration
+ self.order = list()
+ self.definitions = dict()
+
+ self.module_name = self.__module__
+ self.job_name = configuration.pop('job_name')
+ self.override_name = configuration.pop('override_name')
+ self.fake_name = None
+
+ self._runtime_counters = RuntimeCounters(configuration=configuration)
+ self.charts = Charts(job_name=self.actual_name,
+ priority=configuration.pop('priority'),
+ cleanup=configuration.pop('chart_cleanup'),
+ get_update_every=self.get_update_every,
+ module_name=self.module_name)
+
+ def __repr__(self):
+ return '<{cls_bases}: {name}>'.format(cls_bases=', '.join(c.__name__ for c in self.__class__.__bases__),
+ name=self.name)
+
+ @property
+ def name(self):
+ if self.job_name:
+ return '_'.join([self.module_name, self.override_name or self.job_name])
+ return self.module_name
+
+ def actual_name(self):
+ return self.fake_name or self.name
+
+ @property
+ def update_every(self):
+ return self._runtime_counters.FREQ
+
+ @update_every.setter
+ def update_every(self, value):
+ """
+ :param value: <int>
+ :return:
+ """
+ self._runtime_counters.FREQ = value
+
+ def get_update_every(self):
+ return self.update_every
+
+ def check(self):
+ """
+ check() prototype
+ :return: boolean
+ """
+ self.debug("job doesn't implement check() method. Using default which simply invokes get_data().")
+ data = self.get_data()
+ if data and isinstance(data, dict):
+ return True
+ self.debug('returned value is wrong: {0}'.format(data))
+ return False
+
+ @create_runtime_chart
+ def create(self):
+ for chart_name in self.order:
+ chart_config = self.definitions.get(chart_name)
+
+ if not chart_config:
+ self.debug("create() => [NOT ADDED] chart '{chart_name}' not in definitions. "
+ "Skipping it.".format(chart_name=chart_name))
+ continue
+
+ # create chart
+ chart_params = [chart_name] + chart_config['options']
+ try:
+ self.charts.add_chart(params=chart_params)
+ except ChartError as error:
+ self.error("create() => [NOT ADDED] (chart '{chart}': {error})".format(chart=chart_name,
+ error=error))
+ continue
+
+ # add dimensions to chart
+ for dimension in chart_config['lines']:
+ try:
+ self.charts[chart_name].add_dimension(dimension)
+ except ChartError as error:
+ self.error("create() => [NOT ADDED] (dimension '{dimension}': {error})".format(dimension=dimension,
+ error=error))
+ continue
+
+ # add variables to chart
+ if 'variables' in chart_config:
+ for variable in chart_config['variables']:
+ try:
+ self.charts[chart_name].add_variable(variable)
+ except ChartError as error:
+ self.error("create() => [NOT ADDED] (variable '{var}': {error})".format(var=variable,
+ error=error))
+ continue
+
+ del self.order
+ del self.definitions
+
+ # True if job has at least 1 chart else False
+ return bool(self.charts)
+
+ def run(self):
+ """
+ Runs job in thread. Handles retries.
+ Exits when job failed or timed out.
+ :return: None
+ """
+ job = self._runtime_counters
+ self.debug('started, update frequency: {freq}, '
+ 'retries: {retries}'.format(freq=job.FREQ, retries=job.RETRIES_MAX - job.RETRIES))
+
+ while True:
+ job.START_RUN = time()
+
+ job.NEXT_RUN = job.START_RUN - (job.START_RUN % job.FREQ) + job.FREQ + job.PENALTY
+
+ self.sleep_until_next_run()
+
+ if job.PREV_UPDATE:
+ job.SINCE_UPDATE = int((job.START_RUN - job.PREV_UPDATE) * 1e6)
+
+ try:
+ updated = self.update(interval=job.SINCE_UPDATE)
+ except Exception as error:
+ self.error('update() unhandled exception: {error}'.format(error=error))
+ updated = False
+
+ if not updated:
+ if not self.manage_retries():
+ return
+ else:
+ job.ELAPSED = int((time() - job.START_RUN) * 1e3)
+ job.PREV_UPDATE = job.START_RUN
+ job.RETRIES, job.PENALTY = 0, 0
+ safe_print(RUNTIME_CHART_UPDATE.format(job_name=self.name,
+ since_last=job.SINCE_UPDATE,
+ elapsed=job.ELAPSED))
+ self.debug('update => [{status}] (elapsed time: {elapsed}, '
+ 'retries left: {retries})'.format(status='OK' if updated else 'FAILED',
+ elapsed=job.ELAPSED if updated else '-',
+ retries=job.RETRIES_MAX - job.RETRIES))
+
+ def update(self, interval):
+ """
+ :return:
+ """
+ data = self.get_data()
+ if not data:
+ self.debug('get_data() returned no data')
+ return False
+ elif not isinstance(data, dict):
+            self.debug('get_data() returned data of incorrect type')
+ return False
+
+ updated = False
+
+ for chart in self.charts:
+ if chart.flags.obsoleted:
+ continue
+ elif self.charts.cleanup and chart.penalty >= self.charts.cleanup:
+ chart.obsolete()
+                self.error("chart '{0}' was suppressed due to not updating".format(chart.name))
+ continue
+
+ ok = chart.update(data, interval)
+ if ok:
+ updated = True
+
+ if not updated:
+ self.debug('none of the charts has been updated')
+
+ return updated
+
+ def manage_retries(self):
+ rc = self._runtime_counters
+ rc.RETRIES += 1
+ if rc.RETRIES % 5 == 0:
+ rc.PENALTY = int(rc.RETRIES * self.update_every / 2)
+ if rc.RETRIES >= rc.RETRIES_MAX:
+ self.error('stopped after {0} data collection failures in a row'.format(rc.RETRIES_MAX))
+ return False
+ return True
+
+ def sleep_until_next_run(self):
+ job = self._runtime_counters
+
+ # sleep() is interruptable
+ while job.is_sleep_time():
+ sleep_time = job.NEXT_RUN - job.START_RUN
+ self.debug('sleeping for {sleep_time} to reach frequency of {freq} sec'.format(sleep_time=sleep_time,
+ freq=job.FREQ + job.PENALTY))
+ sleep(sleep_time)
+ job.START_RUN = time()
+
+ def get_data(self):
+ return self._get_data()
+
+ def _get_data(self):
+ raise NotImplementedError
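Two pieces of arithmetic drive the scheduler above: run() aligns each job to the next multiple of its update frequency (plus any accumulated penalty), and manage_retries() recomputes the penalty on every 5th consecutive failure. Reproduced standalone:

    # NEXT_RUN snaps to the next multiple of FREQ, then adds the penalty.
    def next_run(start_run, freq, penalty):
        return start_run - (start_run % freq) + freq + penalty

    # Every 5th consecutive failure: penalty = retries * update_every / 2.
    def penalty(retries, update_every):
        return int(retries * update_every / 2)

    print(next_run(103, 5, 0))  # 105: the job wakes on a 5-second boundary
    print(penalty(10, 2))       # 10: the next run is pushed back 10 extra seconds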
diff --git a/python.d/python_modules/bases/FrameworkServices/SocketService.py b/python.d/python_modules/bases/FrameworkServices/SocketService.py
new file mode 100644
index 00000000..90631df1
--- /dev/null
+++ b/python.d/python_modules/bases/FrameworkServices/SocketService.py
@@ -0,0 +1,250 @@
+# -*- coding: utf-8 -*-
+# Description:
+# Author: Pawel Krupa (paulfantom)
+
+import socket
+
+from bases.FrameworkServices.SimpleService import SimpleService
+
+
+class SocketService(SimpleService):
+ def __init__(self, configuration=None, name=None):
+ self._sock = None
+ self._keep_alive = False
+ self.host = 'localhost'
+ self.port = None
+ self.unix_socket = None
+ self.request = ''
+ self.__socket_config = None
+ self.__empty_request = "".encode()
+ SimpleService.__init__(self, configuration=configuration, name=name)
+
+ def _socket_error(self, message=None):
+ if self.unix_socket is not None:
+ self.error('unix socket "{socket}": {message}'.format(socket=self.unix_socket,
+ message=message))
+ else:
+ if self.__socket_config is not None:
+ af, sock_type, proto, canon_name, sa = self.__socket_config
+ self.error('socket to "{address}" port {port}: {message}'.format(address=sa[0],
+ port=sa[1],
+ message=message))
+ else:
+ self.error('unknown socket: {0}'.format(message))
+
+ def _connect2socket(self, res=None):
+ """
+ Connect to a socket, passing the result of getaddrinfo()
+ :return: boolean
+ """
+ if res is None:
+ res = self.__socket_config
+ if res is None:
+            self.error("Cannot create socket to 'None'")
+ return False
+
+ af, sock_type, proto, canon_name, sa = res
+ try:
+ self.debug('Creating socket to "{address}", port {port}'.format(address=sa[0], port=sa[1]))
+ self._sock = socket.socket(af, sock_type, proto)
+ except socket.error as error:
+ self.error('Failed to create socket "{address}", port {port}, error: {error}'.format(address=sa[0],
+ port=sa[1],
+ error=error))
+ self._sock = None
+ self.__socket_config = None
+ return False
+
+ try:
+ self.debug('connecting socket to "{address}", port {port}'.format(address=sa[0], port=sa[1]))
+ self._sock.connect(sa)
+ except socket.error as error:
+ self.error('Failed to connect to "{address}", port {port}, error: {error}'.format(address=sa[0],
+ port=sa[1],
+ error=error))
+ self._disconnect()
+ self.__socket_config = None
+ return False
+
+ self.debug('connected to "{address}", port {port}'.format(address=sa[0], port=sa[1]))
+ self.__socket_config = res
+ return True
+
+ def _connect2unixsocket(self):
+ """
+ Connect to a unix socket, given its filename
+ :return: boolean
+ """
+ if self.unix_socket is None:
+ self.error("cannot connect to unix socket 'None'")
+ return False
+
+ try:
+ self.debug('attempting DGRAM unix socket "{0}"'.format(self.unix_socket))
+ self._sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
+ self._sock.connect(self.unix_socket)
+ self.debug('connected DGRAM unix socket "{0}"'.format(self.unix_socket))
+ return True
+ except socket.error as error:
+ self.debug('Failed to connect DGRAM unix socket "{socket}": {error}'.format(socket=self.unix_socket,
+ error=error))
+
+ try:
+ self.debug('attempting STREAM unix socket "{0}"'.format(self.unix_socket))
+ self._sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+ self._sock.connect(self.unix_socket)
+ self.debug('connected STREAM unix socket "{0}"'.format(self.unix_socket))
+ return True
+ except socket.error as error:
+ self.debug('Failed to connect STREAM unix socket "{socket}": {error}'.format(socket=self.unix_socket,
+ error=error))
+ self._sock = None
+ return False
+
+ def _connect(self):
+ """
+ Recreate socket and connect to it since sockets cannot be reused after closing
+ Available configurations are IPv6, IPv4 or UNIX socket
+ :return:
+ """
+ try:
+ if self.unix_socket is not None:
+ self._connect2unixsocket()
+
+ else:
+ if self.__socket_config is not None:
+ self._connect2socket()
+ else:
+ for res in socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM):
+ if self._connect2socket(res):
+ break
+
+ except Exception:
+ self._sock = None
+ self.__socket_config = None
+
+ if self._sock is not None:
+ self._sock.setblocking(0)
+ self._sock.settimeout(5)
+ self.debug('set socket timeout to: {0}'.format(self._sock.gettimeout()))
+
+ def _disconnect(self):
+ """
+ Close socket connection
+ :return:
+ """
+ if self._sock is not None:
+ try:
+ self.debug('closing socket')
+ self._sock.shutdown(2) # 0 - read, 1 - write, 2 - all
+ self._sock.close()
+ except Exception:
+ pass
+ self._sock = None
+
+ def _send(self):
+ """
+ Send request.
+ :return: boolean
+ """
+ # Send request if it is needed
+ if self.request != self.__empty_request:
+ try:
+ self.debug('sending request: {0}'.format(self.request))
+ self._sock.send(self.request)
+ except Exception as error:
+ self._socket_error('error sending request: {0}'.format(error))
+ self._disconnect()
+ return False
+ return True
+
+ def _receive(self):
+ """
+ Receive data from socket
+ :return: str
+ """
+ data = ""
+ while True:
+ self.debug('receiving response')
+ try:
+ buf = self._sock.recv(4096)
+ except Exception as error:
+ self._socket_error('failed to receive response: {0}'.format(error))
+ self._disconnect()
+ break
+
+ if buf is None or len(buf) == 0: # handle server disconnect
+ if data == "":
+ self._socket_error('unexpectedly disconnected')
+ else:
+ self.debug('server closed the connection')
+ self._disconnect()
+ break
+
+ self.debug('received data')
+ data += buf.decode('utf-8', 'ignore')
+ if self._check_raw_data(data):
+ break
+
+ self.debug('final response: {0}'.format(data))
+ return data
+
+ def _get_raw_data(self):
+ """
+ Get raw data with low-level "socket" module.
+ :return: str
+ """
+ if self._sock is None:
+ self._connect()
+ if self._sock is None:
+ return None
+
+ # Send request if it is needed
+ if not self._send():
+ return None
+
+ data = self._receive()
+
+ if not self._keep_alive:
+ self._disconnect()
+
+ return data
+
+ @staticmethod
+ def _check_raw_data(data):
+ """
+ Check if all data has been gathered from socket
+ :param data: str
+ :return: boolean
+ """
+ return bool(data)
+
+ def _parse_config(self):
+ """
+ Parse configuration data
+ :return: boolean
+ """
+ try:
+ self.unix_socket = str(self.configuration['socket'])
+ except (KeyError, TypeError):
+ self.debug('No unix socket specified. Trying TCP/IP socket.')
+ self.unix_socket = None
+ try:
+ self.host = str(self.configuration['host'])
+ except (KeyError, TypeError):
+ self.debug('No host specified. Using: "{0}"'.format(self.host))
+ try:
+ self.port = int(self.configuration['port'])
+ except (KeyError, TypeError):
+ self.debug('No port specified. Using: "{0}"'.format(self.port))
+
+ try:
+ self.request = str(self.configuration['request'])
+ except (KeyError, TypeError):
+ self.debug('No request specified. Using: "{0}"'.format(self.request))
+
+ self.request = self.request.encode()
+
+ def check(self):
+ self._parse_config()
+ return SimpleService.check(self)
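_receive() above keeps calling recv() until _check_raw_data() says the response is complete, so subclasses override that hook to encode their protocol's framing. A hedged sketch for a hypothetical line protocol that terminates replies with "END\n" (the request string and terminator are assumptions, not from this diff):

    from bases.FrameworkServices.SocketService import SocketService

    class ExampleProtocolService(SocketService):
        def __init__(self, configuration=None, name=None):
            SocketService.__init__(self, configuration=configuration, name=name)
            self.request = 'stats'  # encoded to bytes by _parse_config()

        @staticmethod
        def _check_raw_data(data):
            # keep reading from the socket until the terminator arrives
            return data.endswith('END\n')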
diff --git a/python.d/python_modules/bases/FrameworkServices/UrlService.py b/python.d/python_modules/bases/FrameworkServices/UrlService.py
new file mode 100644
index 00000000..0941ab16
--- /dev/null
+++ b/python.d/python_modules/bases/FrameworkServices/UrlService.py
@@ -0,0 +1,115 @@
+# -*- coding: utf-8 -*-
+# Description:
+# Author: Pawel Krupa (paulfantom)
+# Author: Ilya Mashchenko (l2isbad)
+
+import urllib3
+
+from bases.FrameworkServices.SimpleService import SimpleService
+
+try:
+ urllib3.disable_warnings()
+except AttributeError:
+ pass
+
+
+class UrlService(SimpleService):
+ def __init__(self, configuration=None, name=None):
+ SimpleService.__init__(self, configuration=configuration, name=name)
+ self.url = self.configuration.get('url')
+ self.user = self.configuration.get('user')
+ self.password = self.configuration.get('pass')
+ self.proxy_user = self.configuration.get('proxy_user')
+ self.proxy_password = self.configuration.get('proxy_pass')
+ self.proxy_url = self.configuration.get('proxy_url')
+ self.header = self.configuration.get('header')
+ self.request_timeout = self.configuration.get('timeout', 1)
+ self._manager = None
+
+ def __make_headers(self, **header_kw):
+ user = header_kw.get('user') or self.user
+ password = header_kw.get('pass') or self.password
+ proxy_user = header_kw.get('proxy_user') or self.proxy_user
+ proxy_password = header_kw.get('proxy_pass') or self.proxy_password
+ custom_header = header_kw.get('header') or self.header
+ header_params = dict(keep_alive=True)
+ proxy_header_params = dict()
+ if user and password:
+ header_params['basic_auth'] = '{user}:{password}'.format(user=user,
+ password=password)
+ if proxy_user and proxy_password:
+ proxy_header_params['proxy_basic_auth'] = '{user}:{password}'.format(user=proxy_user,
+ password=proxy_password)
+ try:
+ header, proxy_header = urllib3.make_headers(**header_params), urllib3.make_headers(**proxy_header_params)
+ except TypeError as error:
+ self.error('build_header() error: {error}'.format(error=error))
+ return None, None
+ else:
+ header.update(custom_header or dict())
+ return header, proxy_header
+
+ def _build_manager(self, **header_kw):
+ header, proxy_header = self.__make_headers(**header_kw)
+ if header is None or proxy_header is None:
+ return None
+ proxy_url = header_kw.get('proxy_url') or self.proxy_url
+ if proxy_url:
+ manager = urllib3.ProxyManager
+ params = dict(proxy_url=proxy_url, headers=header, proxy_headers=proxy_header)
+ else:
+ manager = urllib3.PoolManager
+ params = dict(headers=header)
+ try:
+ url = header_kw.get('url') or self.url
+ if url.startswith('https'):
+ return manager(assert_hostname=False, cert_reqs='CERT_NONE', **params)
+ return manager(**params)
+ except (urllib3.exceptions.ProxySchemeUnknown, TypeError) as error:
+ self.error('build_manager() error:', str(error))
+ return None
+
+ def _get_raw_data(self, url=None, manager=None):
+ """
+ Get raw data from an HTTP request
+ :return: str
+ """
+ try:
+ url = url or self.url
+ manager = manager or self._manager
+ response = manager.request(method='GET',
+ url=url,
+ timeout=self.request_timeout,
+ retries=1,
+ headers=manager.headers)
+ except (urllib3.exceptions.HTTPError, TypeError, AttributeError) as error:
+ self.error('Url: {url}. Error: {error}'.format(url=url, error=error))
+ return None
+ if response.status == 200:
+ return response.data.decode()
+ self.debug('Url: {url}. Http response status code: {code}'.format(url=url, code=response.status))
+ return None
+
+ def check(self):
+ """
+ Format configuration data and try to connect to the server
+ :return: boolean
+ """
+ if not (self.url and isinstance(self.url, str)):
+ self.error('URL is not defined or type is not <str>')
+ return False
+
+ self._manager = self._build_manager()
+ if not self._manager:
+ return False
+
+ try:
+ data = self._get_data()
+ except Exception as error:
+ self.error('_get_data() failed. Url: {url}. Error: {error}'.format(url=self.url, error=error))
+ return False
+
+ if isinstance(data, dict) and data:
+ return True
+ self.error('_get_data() returned no data or type is not <dict>')
+ return False
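A subclass only has to point self.url at its endpoint and implement _get_data(); check() above then refuses to start the job unless _get_data() returns a non-empty dict. A short sketch against a hypothetical JSON status endpoint (URL and key names are illustrative):

    import json

    from bases.FrameworkServices.UrlService import UrlService


    class Service(UrlService):
        def __init__(self, configuration=None, name=None):
            UrlService.__init__(self, configuration=configuration, name=name)
            self.url = self.configuration.get('url', 'http://127.0.0.1:8080/status')

        def _get_data(self):
            raw = self._get_raw_data()  # None on connection/HTTP errors
            if raw is None:
                return None
            try:
                status = json.loads(raw)
            except ValueError:
                return None
            # check() requires a non-empty dict
            return {'connections': int(status['connections'])} if 'connections' in status else None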
diff --git a/python.d/python_modules/bases/FrameworkServices/__init__.py b/python.d/python_modules/bases/FrameworkServices/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/python.d/python_modules/bases/FrameworkServices/__init__.py
diff --git a/python.d/python_modules/bases/__init__.py b/python.d/python_modules/bases/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/python.d/python_modules/bases/__init__.py
diff --git a/python.d/python_modules/bases/charts.py b/python.d/python_modules/bases/charts.py
new file mode 100644
index 00000000..1e9348e5
--- /dev/null
+++ b/python.d/python_modules/bases/charts.py
@@ -0,0 +1,376 @@
+# -*- coding: utf-8 -*-
+# Description: charts and dimensions framework for netdata python.d modules
+# Author: Ilya Mashchenko (l2isbad)
+
+from bases.collection import safe_print
+
+CHART_PARAMS = ['type', 'id', 'name', 'title', 'units', 'family', 'context', 'chart_type']
+DIMENSION_PARAMS = ['id', 'name', 'algorithm', 'multiplier', 'divisor', 'hidden']
+VARIABLE_PARAMS = ['id', 'value']
+
+CHART_TYPES = ['line', 'area', 'stacked']
+DIMENSION_ALGORITHMS = ['absolute', 'incremental', 'percentage-of-absolute-row', 'percentage-of-incremental-row']
+
+CHART_BEGIN = 'BEGIN {type}.{id} {since_last}\n'
+CHART_CREATE = "CHART {type}.{id} '{name}' '{title}' '{units}' '{family}' '{context}' " \
+ "{chart_type} {priority} {update_every} '' 'python.d.plugin' '{module_name}'\n"
+CHART_OBSOLETE = "CHART {type}.{id} '{name}' '{title}' '{units}' '{family}' '{context}' " \
+ "{chart_type} {priority} {update_every} 'obsolete'\n"
+
+
+DIMENSION_CREATE = "DIMENSION '{id}' '{name}' {algorithm} {multiplier} {divisor} '{hidden}'\n"
+DIMENSION_SET = "SET '{id}' = {value}\n"
+
+CHART_VARIABLE_SET = "VARIABLE CHART '{id}' = {value}\n"
+
+RUNTIME_CHART_CREATE = "CHART netdata.runtime_{job_name} '' 'Execution time for {job_name}' 'ms' 'python.d' " \
+ "netdata.pythond_runtime line 145000 {update_every}\n" \
+ "DIMENSION run_time 'run time' absolute 1 1\n"
+
+
+def create_runtime_chart(func):
+ """
+ Calls the wrapped function, then prints the runtime chart to stdout.
+
+ Used as a decorator for the SimpleService.create() method.
+ The 'create runtime chart' functionality is a decorator so that it keeps
+ working for users who re-implement create() in their own classes.
+
+ :param func: class method
+ :return:
+ """
+ def wrapper(*args, **kwargs):
+ self = args[0]
+ ok = func(*args, **kwargs)
+ if ok:
+ safe_print(RUNTIME_CHART_CREATE.format(job_name=self.name,
+ update_every=self._runtime_counters.FREQ))
+ return ok
+ return wrapper
+
+
+class ChartError(Exception):
+ """Base-class for all exceptions raised by this module"""
+
+
+class DuplicateItemError(ChartError):
+ """Raised when a chart or a dimension that has already been added is re-added"""
+
+
+class ItemTypeError(ChartError):
+ """Raised when a value of the wrong type is passed to Chart, Dimension or ChartVariable"""
+
+
+class ItemValueError(ChartError):
+ """Raised when an inappropriate value is passed to Chart, Dimension or ChartVariable"""
+
+
+class Charts:
+ """Represent a collection of charts
+
+ All charts stored in a dict.
+ Chart is a instance of Chart class.
+ Charts adding must be done using Charts.add_chart() method only"""
+ def __init__(self, job_name, priority, cleanup, get_update_every, module_name):
+ """
+ :param job_name: <bound method>
+ :param priority: <int>
+ :param get_update_every: <bound method>
+ """
+ self.job_name = job_name
+ self.priority = priority
+ self.cleanup = cleanup
+ self.get_update_every = get_update_every
+ self.module_name = module_name
+ self.charts = dict()
+
+ def __len__(self):
+ return len(self.charts)
+
+ def __iter__(self):
+ return iter(self.charts.values())
+
+ def __repr__(self):
+ return 'Charts({0})'.format(self)
+
+ def __str__(self):
+ return str([chart for chart in self.charts])
+
+ def __contains__(self, item):
+ return item in self.charts
+
+ def __getitem__(self, item):
+ return self.charts[item]
+
+ def __delitem__(self, key):
+ del self.charts[key]
+
+ def __bool__(self):
+ return bool(self.charts)
+
+ def __nonzero__(self):
+ return self.__bool__()
+
+ def add_chart(self, params):
+ """
+ Create a Chart instance and add it to the dict
+
+ Manually adds the job name, priority and update_every to the chart params.
+ :param params: <list>
+ :return:
+ """
+ params = [self.job_name()] + params
+ new_chart = Chart(params)
+
+ new_chart.params['update_every'] = self.get_update_every()
+ new_chart.params['priority'] = self.priority
+ new_chart.params['module_name'] = self.module_name
+
+ self.priority += 1
+ self.charts[new_chart.id] = new_chart
+
+ return new_chart
+
+ def active_charts(self):
+ return [chart.id for chart in self if not chart.flags.obsoleted]
+
+
+class Chart:
+ """Represent a chart"""
+ def __init__(self, params):
+ """
+ :param params: <list>
+ """
+ if not isinstance(params, list):
+ raise ItemTypeError("'chart' must be a list type")
+ if len(params) < 8:
+ raise ItemValueError("invalid value for 'chart', must be {0}".format(CHART_PARAMS))
+
+ self.params = dict(zip(CHART_PARAMS, (p or str() for p in params)))
+ self.name = '{type}.{id}'.format(type=self.params['type'],
+ id=self.params['id'])
+ if self.params.get('chart_type') not in CHART_TYPES:
+ self.params['chart_type'] = 'line'  # 'absolute' is not in CHART_TYPES; fall back to a valid type
+
+ self.dimensions = list()
+ self.variables = set()
+ self.flags = ChartFlags()
+ self.penalty = 0
+
+ def __getattr__(self, item):
+ try:
+ return self.params[item]
+ except KeyError:
+ raise AttributeError("'{instance}' has no attribute '{attr}'".format(instance=repr(self),
+ attr=item))
+
+ def __repr__(self):
+ return 'Chart({0})'.format(self.id)
+
+ def __str__(self):
+ return self.id
+
+ def __iter__(self):
+ return iter(self.dimensions)
+
+ def __contains__(self, item):
+ return item in [dimension.id for dimension in self.dimensions]
+
+ def add_variable(self, variable):
+ """
+ :param variable: <list>
+ :return:
+ """
+ self.variables.add(ChartVariable(variable))
+
+ def add_dimension(self, dimension):
+ """
+ :param dimension: <list>
+ :return:
+ """
+ dim = Dimension(dimension)
+
+ if dim.id in self:
+ raise DuplicateItemError("'{dimension}' already in '{chart}' dimensions".format(dimension=dim.id,
+ chart=self.name))
+ self.refresh()
+ self.dimensions.append(dim)
+ return dim
+
+ def hide_dimension(self, dimension_id, reverse=False):
+ if dimension_id in self:
+ idx = self.dimensions.index(dimension_id)
+ dimension = self.dimensions[idx]
+ dimension.params['hidden'] = 'hidden' if not reverse else str()
+ self.refresh()
+
+ def create(self):
+ """
+ :return:
+ """
+ chart = CHART_CREATE.format(**self.params)
+ dimensions = ''.join([dimension.create() for dimension in self.dimensions])
+ variables = ''.join([var.set(var.value) for var in self.variables if var])
+
+ self.flags.push = False
+ self.flags.created = True
+
+ safe_print(chart + dimensions + variables)
+
+ def update(self, data, interval):
+ updated_dimensions, updated_variables = str(), str()
+
+ for dim in self.dimensions:
+ value = dim.get_value(data)
+ if value is not None:
+ updated_dimensions += dim.set(value)
+
+ for var in self.variables:
+ value = var.get_value(data)
+ if value is not None:
+ updated_variables += var.set(value)
+
+ if updated_dimensions:
+ since_last = interval if self.flags.updated else 0
+
+ if self.flags.push:
+ self.create()
+
+ chart_begin = CHART_BEGIN.format(type=self.type, id=self.id, since_last=since_last)
+ safe_print(chart_begin, updated_dimensions, updated_variables, 'END\n')
+
+ self.flags.updated = True
+ self.penalty = 0
+ else:
+ self.penalty += 1
+ self.flags.updated = False
+
+ return bool(updated_dimensions)
+
+ def obsolete(self):
+ self.flags.obsoleted = True
+ if self.flags.created:
+ safe_print(CHART_OBSOLETE.format(**self.params))
+
+ def refresh(self):
+ self.penalty = 0
+ self.flags.push = True
+ self.flags.obsoleted = False
+
+
+class Dimension:
+ """Represent a dimension"""
+ def __init__(self, params):
+ """
+ :param params: <list>
+ """
+ if not isinstance(params, list):
+ raise ItemTypeError("'dimension' must be a list type")
+ if not params:
+ raise ItemValueError("invalid value for 'dimension', must be {0}".format(DIMENSION_PARAMS))
+
+ self.params = dict(zip(DIMENSION_PARAMS, (p or str() for p in params)))
+ self.params['name'] = self.params.get('name') or self.params['id']
+
+ if self.params.get('algorithm') not in DIMENSION_ALGORITHMS:
+ self.params['algorithm'] = 'absolute'
+ if not isinstance(self.params.get('multiplier'), int):
+ self.params['multiplier'] = 1
+ if not isinstance(self.params.get('divisor'), int):
+ self.params['divisor'] = 1
+ self.params.setdefault('hidden', '')
+
+ def __getattr__(self, item):
+ try:
+ return self.params[item]
+ except KeyError:
+ raise AttributeError("'{instance}' has no attribute '{attr}'".format(instance=repr(self),
+ attr=item))
+
+ def __repr__(self):
+ return 'Dimension({0})'.format(self.id)
+
+ def __str__(self):
+ return self.id
+
+ def __eq__(self, other):
+ if not isinstance(other, Dimension):
+ return self.id == other
+ return self.id == other.id
+
+ def create(self):
+ return DIMENSION_CREATE.format(**self.params)
+
+ def set(self, value):
+ """
+ :param value: <str>: must be a digit
+ :return:
+ """
+ return DIMENSION_SET.format(id=self.id,
+ value=value)
+
+ def get_value(self, data):
+ try:
+ return int(data[self.id])
+ except (KeyError, TypeError):
+ return None
+
+
+class ChartVariable:
+ """Represent a chart variable"""
+ def __init__(self, params):
+ """
+ :param params: <list>
+ """
+ if not isinstance(params, list):
+ raise ItemTypeError("'variable' must be a list type")
+ if not params:
+ raise ItemValueError("invalid value for 'variable', must be {0}".format(VARIABLE_PARAMS))
+
+ self.params = dict(zip(VARIABLE_PARAMS, params))
+ self.params.setdefault('value', None)
+
+ def __getattr__(self, item):
+ try:
+ return self.params[item]
+ except KeyError:
+ raise AttributeError("'{instance}' has no attribute '{attr}'".format(instance=repr(self),
+ attr=item))
+
+ def __bool__(self):
+ return self.value is not None
+
+ def __nonzero__(self):
+ return self.__bool__()
+
+ def __repr__(self):
+ return 'ChartVariable({0})'.format(self.id)
+
+ def __str__(self):
+ return self.id
+
+ def __eq__(self, other):
+ if isinstance(other, ChartVariable):
+ return self.id == other.id
+ return False
+
+ def __hash__(self):
+ return hash(repr(self))
+
+ def set(self, value):
+ return CHART_VARIABLE_SET.format(id=self.id,
+ value=value)
+
+ def get_value(self, data):
+ try:
+ return int(data[self.id])
+ except (KeyError, TypeError):
+ return None
+
+
+class ChartFlags:
+ def __init__(self):
+ self.push = True
+ self.created = False
+ self.updated = False
+ self.obsoleted = False
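Chart, Dimension and ChartVariable are thin wrappers around the protocol strings defined at the top of the file. Driven by hand (normally Charts.add_chart() supplies priority, update_every and module_name), the emitted plugin protocol looks like this; the values are illustrative:

    from bases.charts import Chart

    # CHART_PARAMS order: type, id, name, title, units, family, context, chart_type
    chart = Chart(['example', 'requests', None, 'Requests', 'requests/s',
                   'requests', 'example.requests', 'line'])
    chart.params.update(priority=60000, update_every=1, module_name='example')
    chart.add_dimension(['requests', None, 'incremental'])

    chart.update({'requests': 42}, interval=1000000)
    # flags.push is still True, so update() first prints the CHART and DIMENSION
    # lines, then: BEGIN example.requests 0 / SET 'requests' = 42 / END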
diff --git a/python.d/python_modules/bases/collection.py b/python.d/python_modules/bases/collection.py
new file mode 100644
index 00000000..e03b4f58
--- /dev/null
+++ b/python.d/python_modules/bases/collection.py
@@ -0,0 +1,144 @@
+# -*- coding: utf-8 -*-
+# Description: common helpers for netdata python.d modules
+# Author: Ilya Mashchenko (l2isbad)
+
+import os
+
+PATH = os.getenv('PATH', '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin').split(':')
+
+CHART_BEGIN = 'BEGIN {0} {1}\n'
+CHART_CREATE = "CHART {0} '{1}' '{2}' '{3}' '{4}' '{5}' {6} {7} {8}\n"
+DIMENSION_CREATE = "DIMENSION '{0}' '{1}' {2} {3} {4} '{5}'\n"
+DIMENSION_SET = "SET '{0}' = {1}\n"
+
+
+def setdefault_values(config, base_dict):
+ for key, value in base_dict.items():
+ config.setdefault(key, value)
+ return config
+
+
+def run_and_exit(func):
+ def wrapper(*args, **kwargs):
+ func(*args, **kwargs)
+ exit(1)
+ return wrapper
+
+
+def on_try_except_finally(on_except=(None, ), on_finally=(None, )):
+ except_func = on_except[0]
+ finally_func = on_finally[0]
+
+ def decorator(func):
+ def wrapper(*args, **kwargs):
+ try:
+ func(*args, **kwargs)
+ except Exception:
+ if except_func:
+ except_func(*on_except[1:])
+ finally:
+ if finally_func:
+ finally_func(*on_finally[1:])
+ return wrapper
+ return decorator
+
+
+def static_vars(**kwargs):
+ def decorate(func):
+ for k in kwargs:
+ setattr(func, k, kwargs[k])
+ return func
+ return decorate
+
+
+@on_try_except_finally(on_except=(exit, 1))
+def safe_print(*msg):
+ """
+ :param msg:
+ :return:
+ """
+ print(''.join(msg))
+
+
+def find_binary(binary):
+ """
+ :param binary: <str>
+ :return:
+ """
+ for directory in PATH:
+ binary_name = '/'.join([directory, binary])
+ if os.path.isfile(binary_name) and os.access(binary_name, os.X_OK):
+ return binary_name
+ return None
+
+
+def read_last_line(f):
+ with open(f, 'rb') as opened:
+ opened.seek(-2, 2)
+ while opened.read(1) != b'\n':
+ opened.seek(-2, 1)
+ if opened.tell() == 0:
+ break
+ result = opened.readline()
+ return result.decode()
+
+
+class OldVersionCompatibility:
+
+ def __init__(self):
+ self._data_stream = str()
+
+ def begin(self, type_id, microseconds=0):
+ """
+ :param type_id: <str>
+ :param microseconds: <str> or <int>: must be a digit
+ :return:
+ """
+ self._data_stream += CHART_BEGIN.format(type_id, microseconds)
+
+ def set(self, dim_id, value):
+ """
+ :param dim_id: <str>
+ :param value: <int> or <str>: must be a digit
+ :return:
+ """
+ self._data_stream += DIMENSION_SET.format(dim_id, value)
+
+ def end(self):
+ self._data_stream += 'END\n'
+
+ def chart(self, type_id, name='', title='', units='', family='', category='', chart_type='line',
+ priority='', update_every=''):
+ """
+ :param type_id: <str>
+ :param name: <str>
+ :param title: <str>
+ :param units: <str>
+ :param family: <str>
+ :param category: <str>
+ :param chart_type: <str>
+ :param priority: <str> or <int>
+ :param update_every: <str> or <int>
+ :return:
+ """
+ self._data_stream += CHART_CREATE.format(type_id, name, title, units,
+ family, category, chart_type,
+ priority, update_every)
+
+ def dimension(self, dim_id, name=None, algorithm="absolute", multiplier=1, divisor=1, hidden=False):
+ """
+ :param dim_id: <str>
+ :param name: <str> or None
+ :param algorithm: <str>
+ :param multiplier: <str> or <int>: must be a digit
+ :param divisor: <str> or <int>: must be a digit
+ :param hidden: <str>: literally "hidden" or ""
+ :return:
+ """
+ self._data_stream += DIMENSION_CREATE.format(dim_id, name or dim_id, algorithm,
+ multiplier, divisor, hidden or str())
+
+ @on_try_except_finally(on_except=(exit, 1))
+ def commit(self):
+ print(self._data_stream)
+ self._data_stream = str()
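Two of these helpers replace things modules used to inherit from the old base.py (see the samba and smartd_log diffs below): find_binary() walks PATH instead of the former self.find_binary(), and read_last_line() seeks backwards from the end of the file, so only the tail of a large log is ever read. Illustrative use, with a hypothetical log path:

    from bases.collection import find_binary, read_last_line

    sudo = find_binary('sudo')  # absolute path, or None when not found
    if sudo:
        print(sudo)             # e.g. /usr/bin/sudo

    # note: read_last_line() assumes the file is at least two bytes long
    last = read_last_line('/var/log/smartd/sda.csv')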
diff --git a/python.d/python_modules/bases/loaders.py b/python.d/python_modules/bases/loaders.py
new file mode 100644
index 00000000..d18b9dcd
--- /dev/null
+++ b/python.d/python_modules/bases/loaders.py
@@ -0,0 +1,66 @@
+# -*- coding: utf-8 -*-
+# Description: module and configuration loaders for netdata python.d modules
+# Author: Ilya Mashchenko (l2isbad)
+
+import types
+from sys import version_info
+
+PY_VERSION = version_info[:2]
+
+if PY_VERSION > (3, 1):
+ from pyyaml3 import SafeLoader as YamlSafeLoader
+ from importlib.machinery import SourceFileLoader
+ DEFAULT_MAPPING_TAG = 'tag:yaml.org,2002:map'
+else:
+ from pyyaml2 import SafeLoader as YamlSafeLoader
+ from imp import load_source as SourceFileLoader
+ DEFAULT_MAPPING_TAG = u'tag:yaml.org,2002:map'
+
+try:
+ from collections import OrderedDict
+except ImportError:
+ from third_party.ordereddict import OrderedDict
+
+
+def dict_constructor(loader, node):
+ return OrderedDict(loader.construct_pairs(node))
+
+
+YamlSafeLoader.add_constructor(DEFAULT_MAPPING_TAG, dict_constructor)
+
+
+class YamlOrderedLoader:
+ @staticmethod
+ def load_config_from_file(file_name):
+ opened, loaded = False, False
+ try:
+ stream = open(file_name, 'r')
+ opened = True
+ loader = YamlSafeLoader(stream)
+ loaded = True
+ parsed = loader.get_single_data() or dict()
+ except Exception as error:
+ return dict(), error
+ else:
+ return parsed, None
+ finally:
+ if opened:
+ stream.close()
+ if loaded:
+ loader.dispose()
+
+
+class SourceLoader:
+ @staticmethod
+ def load_module_from_file(name, path):
+ try:
+ loaded = SourceFileLoader(name, path)
+ if isinstance(loaded, types.ModuleType):
+ return loaded, None
+ return loaded.load_module(), None
+ except Exception as error:
+ return None, error
+
+
+class ModuleAndConfigLoader(YamlOrderedLoader, SourceLoader):
+ pass
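ModuleAndConfigLoader bundles the two loaders for callers that need both a chart module and its YAML job configuration; both return a (result, error) pair instead of raising. A sketch with hypothetical paths, assuming the plugin's bundled pyyaml packages are importable:

    from bases.loaders import ModuleAndConfigLoader

    loader = ModuleAndConfigLoader()

    module, error = loader.load_module_from_file(
        'example', '/usr/libexec/netdata/python.d/example.chart.py')
    if error:
        raise SystemExit(error)

    config, error = loader.load_config_from_file('/etc/netdata/python.d/example.conf')
    # config is an OrderedDict (the custom constructor above preserves mapping
    # order), or an empty dict returned together with the error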
diff --git a/python.d/python_modules/bases/loggers.py b/python.d/python_modules/bases/loggers.py
new file mode 100644
index 00000000..fc40b83d
--- /dev/null
+++ b/python.d/python_modules/bases/loggers.py
@@ -0,0 +1,205 @@
+# -*- coding: utf-8 -*-
+# Description: logging for netdata python.d modules
+# Author: Ilya Mashchenko (l2isbad)
+
+import logging
+import traceback
+
+from sys import exc_info
+
+try:
+ from time import monotonic as time
+except ImportError:
+ from time import time
+
+from bases.collection import on_try_except_finally
+
+
+LOGGING_LEVELS = {'CRITICAL': 50,
+ 'ERROR': 40,
+ 'WARNING': 30,
+ 'INFO': 20,
+ 'DEBUG': 10,
+ 'NOTSET': 0}
+
+DEFAULT_LOG_LINE_FORMAT = '%(asctime)s: %(name)s %(levelname)s : %(message)s'
+DEFAULT_LOG_TIME_FORMAT = '%Y-%m-%d %H:%M:%S'
+
+PYTHON_D_LOG_LINE_FORMAT = '%(asctime)s: %(name)s %(levelname)s: %(module_name)s: %(job_name)s: %(message)s'
+PYTHON_D_LOG_NAME = 'python.d'
+
+
+def limiter(log_max_count=30, allowed_in_seconds=60):
+ def on_decorator(func):
+
+ def on_call(*args):
+ current_time = args[0]._runtime_counters.START_RUN
+ lc = args[0]._logger_counters
+
+ if lc.logged and lc.logged % log_max_count == 0:
+ if current_time - lc.time_to_compare <= allowed_in_seconds:
+ lc.dropped += 1
+ return
+ lc.time_to_compare = current_time
+
+ lc.logged += 1
+ func(*args)
+
+ return on_call
+ return on_decorator
+
+
+def add_traceback(func):
+ def on_call(*args):
+ self = args[0]
+
+ # always log the message; append the traceback only when tracebacks
+ # are enabled and an exception is actually being handled
+ func(*args)
+ if self.log_traceback and exc_info()[0]:
+ func(self, traceback.format_exc())
+
+ return on_call
+
+
+class LoggerCounters:
+ def __init__(self):
+ self.logged = 0
+ self.dropped = 0
+ self.time_to_compare = time()
+
+ def __repr__(self):
+ return 'LoggerCounter(logged: {logged}, dropped: {dropped})'.format(logged=self.logged,
+ dropped=self.dropped)
+
+
+class BaseLogger(object):
+ def __init__(self, logger_name, log_fmt=DEFAULT_LOG_LINE_FORMAT, date_fmt=DEFAULT_LOG_TIME_FORMAT,
+ handler=logging.StreamHandler):
+ """
+ :param logger_name: <str>
+ :param log_fmt: <str>
+ :param date_fmt: <str>
+ :param handler: <logging handler>
+ """
+ self.logger = logging.getLogger(logger_name)
+ if not self.has_handlers():
+ self.severity = 'INFO'
+ self.logger.addHandler(handler())
+ self.set_formatter(fmt=log_fmt, date_fmt=date_fmt)
+
+ def __repr__(self):
+ return '<Logger: {name}>'.format(name=self.logger.name)
+
+ def set_formatter(self, fmt, date_fmt=DEFAULT_LOG_TIME_FORMAT):
+ """
+ :param fmt: <str>
+ :param date_fmt: <str>
+ :return:
+ """
+ if self.has_handlers():
+ self.logger.handlers[0].setFormatter(logging.Formatter(fmt=fmt, datefmt=date_fmt))
+
+ def has_handlers(self):
+ return self.logger.handlers
+
+ @property
+ def severity(self):
+ return self.logger.getEffectiveLevel()
+
+ @severity.setter
+ def severity(self, level):
+ """
+ :param level: <str> or <int>
+ :return:
+ """
+ if level in LOGGING_LEVELS:
+ self.logger.setLevel(LOGGING_LEVELS[level])
+
+ def debug(self, *msg, **kwargs):
+ self.logger.debug(' '.join(map(str, msg)), **kwargs)
+
+ def info(self, *msg, **kwargs):
+ self.logger.info(' '.join(map(str, msg)), **kwargs)
+
+ def warning(self, *msg, **kwargs):
+ self.logger.warning(' '.join(map(str, msg)), **kwargs)
+
+ def error(self, *msg, **kwargs):
+ self.logger.error(' '.join(map(str, msg)), **kwargs)
+
+ def alert(self, *msg, **kwargs):
+ self.logger.critical(' '.join(map(str, msg)), **kwargs)
+
+ @on_try_except_finally(on_finally=(exit, 1))
+ def fatal(self, *msg, **kwargs):
+ self.logger.critical(' '.join(map(str, msg)), **kwargs)
+
+
+class PythonDLogger(object):
+ def __init__(self, logger_name=PYTHON_D_LOG_NAME, log_fmt=PYTHON_D_LOG_LINE_FORMAT):
+ """
+ :param logger_name: <str>
+ :param log_fmt: <str>
+ """
+ self.logger = BaseLogger(logger_name, log_fmt=log_fmt)
+ self.module_name = 'plugin'
+ self.job_name = 'main'
+ self._logger_counters = LoggerCounters()
+
+ _LOG_TRACEBACK = False
+
+ @property
+ def log_traceback(self):
+ return PythonDLogger._LOG_TRACEBACK
+
+ @log_traceback.setter
+ def log_traceback(self, value):
+ PythonDLogger._LOG_TRACEBACK = value
+
+ def debug(self, *msg):
+ self.logger.debug(*msg, extra={'module_name': self.module_name,
+ 'job_name': self.job_name or self.module_name})
+
+ def info(self, *msg):
+ self.logger.info(*msg, extra={'module_name': self.module_name,
+ 'job_name': self.job_name or self.module_name})
+
+ def warning(self, *msg):
+ self.logger.warning(*msg, extra={'module_name': self.module_name,
+ 'job_name': self.job_name or self.module_name})
+
+ @add_traceback
+ def error(self, *msg):
+ self.logger.error(*msg, extra={'module_name': self.module_name,
+ 'job_name': self.job_name or self.module_name})
+
+ @add_traceback
+ def alert(self, *msg):
+ self.logger.alert(*msg, extra={'module_name': self.module_name,
+ 'job_name': self.job_name or self.module_name})
+
+ def fatal(self, *msg):
+ self.logger.fatal(*msg, extra={'module_name': self.module_name,
+ 'job_name': self.job_name or self.module_name})
+
+
+class PythonDLimitedLogger(PythonDLogger):
+ @limiter()
+ def info(self, *msg):
+ PythonDLogger.info(self, *msg)
+
+ @limiter()
+ def warning(self, *msg):
+ PythonDLogger.warning(self, *msg)
+
+ @limiter()
+ def error(self, *msg):
+ PythonDLogger.error(self, *msg)
+
+ @limiter()
+ def alert(self, *msg):
+ PythonDLogger.alert(self, *msg)
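PythonDLogger fills the module and job fields that PYTHON_D_LOG_LINE_FORMAT expects, while PythonDLimitedLogger wraps the non-fatal levels with limiter(). Note that limiter() reads self._runtime_counters.START_RUN, which this class does not define; the attribute comes from the service class that inherits the logger, so the limited variant is not usable standalone. The plain logger, illustratively:

    from bases.loggers import PythonDLogger

    log = PythonDLogger()
    log.module_name = 'example'
    log.job_name = 'local'
    log.logger.severity = 'DEBUG'  # severity is a property of the wrapped BaseLogger
    log.info('collector started')
    # -> <timestamp>: python.d INFO: example: local: collector started
    log.log_traceback = True       # error()/alert() now also log a pending traceback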
diff --git a/python.d/python_modules/msg.py b/python.d/python_modules/msg.py
deleted file mode 100644
index 74716770..00000000
--- a/python.d/python_modules/msg.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# -*- coding: utf-8 -*-
-# Description: logging for netdata python.d modules
-
-import traceback
-import sys
-from time import time, strftime
-
-DEBUG_FLAG = False
-TRACE_FLAG = False
-PROGRAM = ""
-LOG_COUNTER = 0
-LOG_THROTTLE = 10000 # has to be too big during init
-LOG_INTERVAL = 1 # has to be too low during init
-LOG_NEXT_CHECK = 0
-
-WRITE = sys.stderr.write
-FLUSH = sys.stderr.flush
-
-
-def log_msg(msg_type, *args):
- """
- Print message on stderr.
- :param msg_type: str
- """
- global LOG_COUNTER
- global LOG_THROTTLE
- global LOG_INTERVAL
- global LOG_NEXT_CHECK
- now = time()
-
- if not DEBUG_FLAG:
- LOG_COUNTER += 1
-
- # WRITE("COUNTER " + str(LOG_COUNTER) + " THROTTLE " + str(LOG_THROTTLE) + " INTERVAL " + str(LOG_INTERVAL) + " NOW " + str(now) + " NEXT " + str(LOG_NEXT_CHECK) + "\n")
-
- if LOG_COUNTER <= LOG_THROTTLE or msg_type == "FATAL" or msg_type == "ALERT":
- timestamp = strftime('%Y-%m-%d %X')
- msg = "%s: %s %s: %s" % (timestamp, PROGRAM, str(msg_type), " ".join(args))
- WRITE(msg + "\n")
- FLUSH()
- elif LOG_COUNTER == LOG_THROTTLE + 1:
- timestamp = strftime('%Y-%m-%d %X')
- msg = "%s: python.d.plugin: throttling further log messages for %s seconds" % (timestamp, str(int(LOG_NEXT_CHECK + 0.5) - int(now)))
- WRITE(msg + "\n")
- FLUSH()
-
- if LOG_NEXT_CHECK <= now:
- if LOG_COUNTER >= LOG_THROTTLE:
- timestamp = strftime('%Y-%m-%d %X')
- msg = "%s: python.d.plugin: Prevented %s log messages from displaying" % (timestamp, str(LOG_COUNTER - LOG_THROTTLE))
- WRITE(msg + "\n")
- FLUSH()
- LOG_NEXT_CHECK = now - (now % LOG_INTERVAL) + LOG_INTERVAL
- LOG_COUNTER = 0
-
- if TRACE_FLAG:
- if msg_type == "FATAL" or msg_type == "ERROR" or msg_type == "ALERT":
- traceback.print_exc()
-
-
-def debug(*args):
- """
- Print debug message on stderr.
- """
- if not DEBUG_FLAG:
- return
-
- log_msg("DEBUG", *args)
-
-
-def error(*args):
- """
- Print message on stderr.
- """
- log_msg("ERROR", *args)
-
-
-def alert(*args):
- """
- Print message on stderr.
- """
- log_msg("ALERT", *args)
-
-
-def info(*args):
- """
- Print message on stderr.
- """
- log_msg("INFO", *args)
-
-
-def fatal(*args):
- """
- Print message on stderr and exit.
- """
- try:
- log_msg("FATAL", *args)
- print('DISABLE')
- except:
- pass
- sys.exit(1)
diff --git a/python.d/python_modules/third_party/__init__.py b/python.d/python_modules/third_party/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/python.d/python_modules/third_party/__init__.py
diff --git a/python.d/python_modules/lm_sensors.py b/python.d/python_modules/third_party/lm_sensors.py
index 1d868f0e..1d868f0e 100644
--- a/python.d/python_modules/lm_sensors.py
+++ b/python.d/python_modules/third_party/lm_sensors.py
diff --git a/python.d/python_modules/third_party/ordereddict.py b/python.d/python_modules/third_party/ordereddict.py
new file mode 100644
index 00000000..d0b97d47
--- /dev/null
+++ b/python.d/python_modules/third_party/ordereddict.py
@@ -0,0 +1,128 @@
+# Copyright (c) 2009 Raymond Hettinger
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation files
+# (the "Software"), to deal in the Software without restriction,
+# including without limitation the rights to use, copy, modify, merge,
+# publish, distribute, sublicense, and/or sell copies of the Software,
+# and to permit persons to whom the Software is furnished to do so,
+# subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+
+from UserDict import DictMixin
+
+
+class OrderedDict(dict, DictMixin):
+
+ def __init__(self, *args, **kwds):
+ if len(args) > 1:
+ raise TypeError('expected at most 1 arguments, got %d' % len(args))
+ try:
+ self.__end
+ except AttributeError:
+ self.clear()
+ self.update(*args, **kwds)
+
+ def clear(self):
+ self.__end = end = []
+ end += [None, end, end] # sentinel node for doubly linked list
+ self.__map = {} # key --> [key, prev, next]
+ dict.clear(self)
+
+ def __setitem__(self, key, value):
+ if key not in self:
+ end = self.__end
+ curr = end[1]
+ curr[2] = end[1] = self.__map[key] = [key, curr, end]
+ dict.__setitem__(self, key, value)
+
+ def __delitem__(self, key):
+ dict.__delitem__(self, key)
+ key, prev, next = self.__map.pop(key)
+ prev[2] = next
+ next[1] = prev
+
+ def __iter__(self):
+ end = self.__end
+ curr = end[2]
+ while curr is not end:
+ yield curr[0]
+ curr = curr[2]
+
+ def __reversed__(self):
+ end = self.__end
+ curr = end[1]
+ while curr is not end:
+ yield curr[0]
+ curr = curr[1]
+
+ def popitem(self, last=True):
+ if not self:
+ raise KeyError('dictionary is empty')
+ if last:
+ key = reversed(self).next()
+ else:
+ key = iter(self).next()
+ value = self.pop(key)
+ return key, value
+
+ def __reduce__(self):
+ items = [[k, self[k]] for k in self]
+ tmp = self.__map, self.__end
+ del self.__map, self.__end
+ inst_dict = vars(self).copy()
+ self.__map, self.__end = tmp
+ if inst_dict:
+ return self.__class__, (items,), inst_dict
+ return self.__class__, (items,)
+
+ def keys(self):
+ return list(self)
+
+ setdefault = DictMixin.setdefault
+ update = DictMixin.update
+ pop = DictMixin.pop
+ values = DictMixin.values
+ items = DictMixin.items
+ iterkeys = DictMixin.iterkeys
+ itervalues = DictMixin.itervalues
+ iteritems = DictMixin.iteritems
+
+ def __repr__(self):
+ if not self:
+ return '%s()' % (self.__class__.__name__,)
+ return '%s(%r)' % (self.__class__.__name__, self.items())
+
+ def copy(self):
+ return self.__class__(self)
+
+ @classmethod
+ def fromkeys(cls, iterable, value=None):
+ d = cls()
+ for key in iterable:
+ d[key] = value
+ return d
+
+ def __eq__(self, other):
+ if isinstance(other, OrderedDict):
+ if len(self) != len(other):
+ return False
+ for p, q in zip(self.items(), other.items()):
+ if p != q:
+ return False
+ return True
+ return dict.__eq__(self, other)
+
+ def __ne__(self, other):
+ return not self == other
diff --git a/python.d/rabbitmq.chart.py b/python.d/rabbitmq.chart.py
index 76391203..eef472bf 100644
--- a/python.d/rabbitmq.chart.py
+++ b/python.d/rabbitmq.chart.py
@@ -11,7 +11,7 @@ try:
except ImportError:
from Queue import Queue
-from base import UrlService
+from bases.FrameworkServices.UrlService import UrlService
# default module values (can be overridden per job in `config`)
update_every = 1
diff --git a/python.d/redis.chart.py b/python.d/redis.chart.py
index 7c3c43f5..bcfcf16a 100644
--- a/python.d/redis.chart.py
+++ b/python.d/redis.chart.py
@@ -2,10 +2,9 @@
# Description: redis netdata python.d module
# Author: Pawel Krupa (paulfantom)
-from base import SocketService
+from bases.FrameworkServices.SocketService import SocketService
# default module values (can be overridden per job in `config`)
-#update_every = 2
priority = 60000
retries = 60
@@ -19,7 +18,8 @@ retries = 60
# 'unix_socket': None
# }}
-ORDER = ['operations', 'hit_rate', 'memory', 'keys', 'net', 'connections', 'clients', 'slaves', 'persistence']
+ORDER = ['operations', 'hit_rate', 'memory', 'keys', 'net', 'connections', 'clients', 'slaves', 'persistence',
+ 'bgsave_now', 'bgsave_health']
CHARTS = {
'operations': {
@@ -72,6 +72,18 @@ CHARTS = {
'redis.rdb_changes', 'line'],
'lines': [
['rdb_changes_since_last_save', 'changes', 'absolute']
+ ]},
+ 'bgsave_now': {
+ 'options': [None, 'Duration of the RDB Save Operation', 'seconds', 'persistence',
+ 'redis.bgsave_now', 'absolute'],
+ 'lines': [
+ ['rdb_bgsave_in_progress', 'rdb save', 'absolute']
+ ]},
+ 'bgsave_health': {
+ 'options': [None, 'Status of the Last RDB Save Operation', 'status', 'persistence',
+ 'redis.bgsave_health', 'line'],
+ 'lines': [
+ ['rdb_last_bgsave_status', 'rdb save', 'absolute']
]}
}
@@ -87,6 +99,7 @@ class Service(SocketService):
self.port = self.configuration.get('port', 6379)
self.unix_socket = self.configuration.get('socket')
password = self.configuration.get('pass', str())
+ self.bgsave_time = 0
self.requests = dict(request='INFO\r\n'.encode(),
password=' '.join(['AUTH', password, '\r\n']).encode() if password else None)
self.request = self.requests['request']
@@ -130,9 +143,8 @@ class Service(SocketService):
data[t[0]] = t[1]
except (IndexError, ValueError):
self.debug("invalid line received: " + str(line))
- pass
- if len(data) == 0:
+ if not data:
self.error("received data doesn't have any records")
return None
@@ -142,6 +154,14 @@ class Service(SocketService):
except (KeyError, ZeroDivisionError, TypeError):
data['hit_rate'] = 0
+ if data['rdb_bgsave_in_progress'] != '0\r':
+ self.bgsave_time += self.update_every
+ else:
+ self.bgsave_time = 0
+
+ data['rdb_last_bgsave_status'] = 0 if data['rdb_last_bgsave_status'] == 'ok\r' else 1
+ data['rdb_bgsave_in_progress'] = self.bgsave_time
+
return data
def _check_raw_data(self, data):
@@ -170,9 +190,6 @@ class Service(SocketService):
Parse configuration, check if redis is available, and dynamically create chart lines data
:return: boolean
"""
- if self.name == "":
- self.name = "local"
- self.chart_name += "_" + self.name
data = self._get_data()
if data is None:
return False
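The bgsave additions turn two Redis INFO fields (delivered with a trailing '\r', since INFO uses CRLF line endings) into chart-friendly integers: the in-progress flag is accumulated into an elapsed-seconds counter, and the status string becomes 0/1. The logic, worked through with illustrative poll results:

    update_every = 1
    bgsave_time = 0

    # three consecutive polls: an RDB save runs for two of them, then finishes
    for raw in ('1\r', '1\r', '0\r'):
        bgsave_time = bgsave_time + update_every if raw != '0\r' else 0
        print(bgsave_time)  # 1, 2, 0 -> how long the current save has been running

    status = 'ok\r'
    print(0 if status == 'ok\r' else 1)  # 0 = last save succeeded, 1 = failed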
diff --git a/python.d/retroshare.chart.py b/python.d/retroshare.chart.py
index 0c97973f..8c0330ec 100644
--- a/python.d/retroshare.chart.py
+++ b/python.d/retroshare.chart.py
@@ -2,9 +2,10 @@
# Description: RetroShare netdata python.d module
# Authors: sehraf
-from base import UrlService
import json
+from bases.FrameworkServices.UrlService import UrlService
+
# default module values (can be overridden per job in `config`)
# update_every = 2
priority = 60000
@@ -38,10 +39,7 @@ CHARTS = {
class Service(UrlService):
def __init__(self, configuration=None, name=None):
UrlService.__init__(self, configuration=configuration, name=name)
- try:
- self.baseurl = str(self.configuration['url'])
- except (KeyError, TypeError):
- self.baseurl = 'http://localhost:9090'
+ self.baseurl = self.configuration.get('url', 'http://localhost:9090')
self.order = ORDER
self.definitions = CHARTS
@@ -55,7 +53,7 @@ class Service(UrlService):
parsed = json.loads(raw)
if str(parsed['returncode']) != 'ok':
return None
- except:
+ except (TypeError, ValueError):
return None
return parsed['data'][0]
diff --git a/python.d/samba.chart.py b/python.d/samba.chart.py
index 767c9746..3f4fd5a1 100644
--- a/python.d/samba.chart.py
+++ b/python.d/samba.chart.py
@@ -15,9 +15,11 @@
# a lot there, bring one or more out into its own chart and make it incremental
# (like find and notify... good examples).
-from base import ExecutableService
import re
+from bases.collection import find_binary
+from bases.FrameworkServices.ExecutableService import ExecutableService
+
# default module values (can be overridden per job in `config`)
update_every = 5
priority = 60000
@@ -94,10 +96,10 @@ class Service(ExecutableService):
self.rgx_smb2 = re.compile(r'(smb2_[^:]+|syscall_.*file_bytes):\s+(\d+)')
def check(self):
- sudo_binary, smbstatus_binary = self.find_binary('sudo'), self.find_binary('smbstatus')
+ sudo_binary, smbstatus_binary = find_binary('sudo'), find_binary('smbstatus')
if not (sudo_binary and smbstatus_binary):
- self.error('Can\'t locate \'sudo\' or \'smbstatus\' binary')
+ self.error("Can\'t locate 'sudo' or 'smbstatus' binary")
return False
self.command = [sudo_binary, '-v']
diff --git a/python.d/sensors.chart.py b/python.d/sensors.chart.py
index e83aacfd..06e420b6 100644
--- a/python.d/sensors.chart.py
+++ b/python.d/sensors.chart.py
@@ -2,8 +2,8 @@
# Description: sensors netdata python.d plugin
# Author: Pawel Krupa (paulfantom)
-from base import SimpleService
-import lm_sensors as sensors
+from bases.FrameworkServices.SimpleService import SimpleService
+from third_party import lm_sensors as sensors
# default module values (can be overridden per job in `config`)
# update_every = 2
@@ -75,15 +75,12 @@ TYPE_MAP = {
class Service(SimpleService):
def __init__(self, configuration=None, name=None):
SimpleService.__init__(self, configuration=configuration, name=name)
- self.order = []
- self.definitions = {}
- self.celsius = ('Celsius', lambda x: x)
- self.fahrenheit = ('Fahrenheit', lambda x: x * 9 / 5 + 32) if self.configuration.get('fahrenheit') else False
- self.choice = (choice for choice in [self.fahrenheit, self.celsius] if choice)
- self.chips = []
+ self.order = list()
+ self.definitions = dict()
+ self.chips = list()
- def _get_data(self):
- data = {}
+ def get_data(self):
+ data = dict()
try:
for chip in sensors.ChipIterator():
prefix = sensors.chip_snprintf_name(chip)
@@ -92,46 +89,39 @@ class Service(SimpleService):
for sf in sfi:
val = sensors.get_value(chip, sf.number)
break
- typeName = TYPE_MAP[feature.type]
- if typeName in LIMITS:
- limit = LIMITS[typeName];
+ type_name = TYPE_MAP[feature.type]
+ if type_name in LIMITS:
+ limit = LIMITS[type_name]
if val < limit[0] or val > limit[1]:
continue
- if 'temp' in str(feature.name.decode()):
- data[prefix + "_" + str(feature.name.decode())] = int(self.calc(val) * 1000)
- else:
data[prefix + "_" + str(feature.name.decode())] = int(val * 1000)
- except Exception as e:
- self.error(e)
+ except Exception as error:
+ self.error(error)
return None
- if len(data) == 0:
- return None
- return data
+ return data or None
- def _create_definitions(self):
- for type in ORDER:
+ def create_definitions(self):
+ for sensor in ORDER:
for chip in sensors.ChipIterator():
chip_name = sensors.chip_snprintf_name(chip)
- if len(self.chips) != 0 and not any([chip_name.startswith(ex) for ex in self.chips]):
+ if self.chips and not any([chip_name.startswith(ex) for ex in self.chips]):
continue
for feature in sensors.FeatureIterator(chip):
sfi = sensors.SubFeatureIterator(chip, feature)
vals = [sensors.get_value(chip, sf.number) for sf in sfi]
if vals[0] == 0:
continue
- if TYPE_MAP[feature.type] == type:
+ if TYPE_MAP[feature.type] == sensor:
# create chart
name = chip_name + "_" + TYPE_MAP[feature.type]
if name not in self.order:
self.order.append(name)
- chart_def = list(CHARTS[type]['options'])
+ chart_def = list(CHARTS[sensor]['options'])
chart_def[1] = chip_name + chart_def[1]
- if chart_def[2] == 'Celsius':
- chart_def[2] = self.choice[0]
self.definitions[name] = {'options': chart_def}
self.definitions[name]['lines'] = []
- line = list(CHARTS[type]['lines'][0])
+ line = list(CHARTS[sensor]['lines'][0])
line[0] = chip_name + "_" + str(feature.name.decode())
line[1] = sensors.get_label(chip, feature)
self.definitions[name]['lines'].append(line)
@@ -139,23 +129,11 @@ class Service(SimpleService):
def check(self):
try:
sensors.init()
- except Exception as e:
- self.error(e)
+ except Exception as error:
+ self.error(error)
return False
-
- try:
- self.choice = next(self.choice)
- except StopIteration:
- # That can not happen but..
- self.choice = ('Celsius', lambda x: x)
- self.calc = self.choice[1]
- else:
- self.calc = self.choice[1]
- try:
- self._create_definitions()
- except Exception as e:
- self.error(e)
- return False
+ self.create_definitions()
return True
+
diff --git a/python.d/smartd_log.chart.py b/python.d/smartd_log.chart.py
index 4039c153..07ad88cd 100644
--- a/python.d/smartd_log.chart.py
+++ b/python.d/smartd_log.chart.py
@@ -2,221 +2,345 @@
# Description: smart netdata python.d module
# Author: l2isbad, vorph1
-from re import compile as r_compile
-from os import listdir, access, R_OK
-from os.path import isfile, join, getsize, basename, isdir
-try:
- from queue import Queue
-except ImportError:
- from Queue import Queue
-from threading import Thread
-from base import SimpleService
+import os
+import re
+
from collections import namedtuple
+from time import time
-# default module values (can be overridden per job in `config`)
-update_every = 5
-priority = 60000
+from bases.collection import read_last_line
+from bases.FrameworkServices.SimpleService import SimpleService
# charts order (can be overridden if you want less charts, or different order)
ORDER = ['1', '4', '5', '7', '9', '12', '193', '194', '197', '198', '200']
SMART_ATTR = {
- '1': 'Read Error Rate',
- '2': 'Throughput Performance',
- '3': 'Spin-Up Time',
- '4': 'Start/Stop Count',
- '5': 'Reallocated Sectors Count',
- '6': 'Read Channel Margin',
- '7': 'Seek Error Rate',
- '8': 'Seek Time Performance',
- '9': 'Power-On Hours Count',
- '10': 'Spin-up Retries',
- '11': 'Calibration Retries',
- '12': 'Power Cycle Count',
- '13': 'Soft Read Error Rate',
- '100': 'Erase/Program Cycles',
- '103': 'Translation Table Rebuild',
- '108': 'Unknown (108)',
- '170': 'Reserved Block Count',
- '171': 'Program Fail Count',
- '172': 'Erase Fail Count',
- '173': 'Wear Leveller Worst Case Erase Count',
- '174': 'Unexpected Power Loss',
- '175': 'Program Fail Count',
- '176': 'Erase Fail Count',
- '177': 'Wear Leveling Count',
- '178': 'Used Reserved Block Count',
- '179': 'Used Reserved Block Count',
- '180': 'Unused Reserved Block Count',
- '181': 'Program Fail Count',
- '182': 'Erase Fail Count',
- '183': 'SATA Downshifts',
- '184': 'End-to-End error',
- '185': 'Head Stability',
- '186': 'Induced Op-Vibration Detection',
- '187': 'Reported Uncorrectable Errors',
- '188': 'Command Timeout',
- '189': 'High Fly Writes',
- '190': 'Temperature',
- '191': 'G-Sense Errors',
- '192': 'Power-Off Retract Cycles',
- '193': 'Load/Unload Cycles',
- '194': 'Temperature',
- '195': 'Hardware ECC Recovered',
- '196': 'Reallocation Events',
- '197': 'Current Pending Sectors',
- '198': 'Off-line Uncorrectable',
- '199': 'UDMA CRC Error Rate',
- '200': 'Write Error Rate',
- '201': 'Soft Read Errors',
- '202': 'Data Address Mark Errors',
- '203': 'Run Out Cancel',
- '204': 'Soft ECC Corrections',
- '205': 'Thermal Asperity Rate',
- '206': 'Flying Height',
- '207': 'Spin High Current',
- '209': 'Offline Seek Performance',
- '220': 'Disk Shift',
- '221': 'G-Sense Error Rate',
- '222': 'Loaded Hours',
- '223': 'Load/Unload Retries',
- '224': 'Load Friction',
- '225': 'Load/Unload Cycles',
- '226': 'Load-in Time',
- '227': 'Torque Amplification Count',
- '228': 'Power-Off Retracts',
- '230': 'GMR Head Amplitude',
- '231': 'Temperature',
- '232': 'Available Reserved Space',
- '233': 'Media Wearout Indicator',
- '240': 'Head Flying Hours',
- '241': 'Total LBAs Written',
- '242': 'Total LBAs Read',
- '250': 'Read Error Retry Rate'
+ '1': 'Read Error Rate',
+ '2': 'Throughput Performance',
+ '3': 'Spin-Up Time',
+ '4': 'Start/Stop Count',
+ '5': 'Reallocated Sectors Count',
+ '6': 'Read Channel Margin',
+ '7': 'Seek Error Rate',
+ '8': 'Seek Time Performance',
+ '9': 'Power-On Hours Count',
+ '10': 'Spin-up Retries',
+ '11': 'Calibration Retries',
+ '12': 'Power Cycle Count',
+ '13': 'Soft Read Error Rate',
+ '100': 'Erase/Program Cycles',
+ '103': 'Translation Table Rebuild',
+ '108': 'Unknown (108)',
+ '170': 'Reserved Block Count',
+ '171': 'Program Fail Count',
+ '172': 'Erase Fail Count',
+ '173': 'Wear Leveller Worst Case Erase Count',
+ '174': 'Unexpected Power Loss',
+ '175': 'Program Fail Count',
+ '176': 'Erase Fail Count',
+ '177': 'Wear Leveling Count',
+ '178': 'Used Reserved Block Count',
+ '179': 'Used Reserved Block Count',
+ '180': 'Unused Reserved Block Count',
+ '181': 'Program Fail Count',
+ '182': 'Erase Fail Count',
+ '183': 'SATA Downshifts',
+ '184': 'End-to-End error',
+ '185': 'Head Stability',
+ '186': 'Induced Op-Vibration Detection',
+ '187': 'Reported Uncorrectable Errors',
+ '188': 'Command Timeout',
+ '189': 'High Fly Writes',
+ '190': 'Temperature',
+ '191': 'G-Sense Errors',
+ '192': 'Power-Off Retract Cycles',
+ '193': 'Load/Unload Cycles',
+ '194': 'Temperature',
+ '195': 'Hardware ECC Recovered',
+ '196': 'Reallocation Events',
+ '197': 'Current Pending Sectors',
+ '198': 'Off-line Uncorrectable',
+ '199': 'UDMA CRC Error Rate',
+ '200': 'Write Error Rate',
+ '201': 'Soft Read Errors',
+ '202': 'Data Address Mark Errors',
+ '203': 'Run Out Cancel',
+ '204': 'Soft ECC Corrections',
+ '205': 'Thermal Asperity Rate',
+ '206': 'Flying Height',
+ '207': 'Spin High Current',
+ '209': 'Offline Seek Performance',
+ '220': 'Disk Shift',
+ '221': 'G-Sense Error Rate',
+ '222': 'Loaded Hours',
+ '223': 'Load/Unload Retries',
+ '224': 'Load Friction',
+ '225': 'Load/Unload Cycles',
+ '226': 'Load-in Time',
+ '227': 'Torque Amplification Count',
+ '228': 'Power-Off Retracts',
+ '230': 'GMR Head Amplitude',
+ '231': 'Temperature',
+ '232': 'Available Reserved Space',
+ '233': 'Media Wearout Indicator',
+ '240': 'Head Flying Hours',
+ '241': 'Total LBAs Written',
+ '242': 'Total LBAs Read',
+ '250': 'Read Error Retry Rate'
+}
+
+LIMIT = namedtuple('LIMIT', ['min', 'max'])
+
+LIMITS = {
+ '194': LIMIT(0, 200)
}
-NAMED_DISKS = namedtuple('disks', ['name', 'size', 'number'])
+RESCAN_INTERVAL = 60
+
+REGEX = re.compile(
+ '(\d+);' # attribute
+ '(\d+);' # normalized value
+ '(\d+)', # raw value
+ re.X
+)
+
+
+def chart_template(chart_name):
+ units, attr_id = chart_name.split('_')[-2:]
+ title = '{value_type} {description}'.format(value_type=units.capitalize(),
+ description=SMART_ATTR[attr_id])
+ family = SMART_ATTR[attr_id].lower()
+
+ return {
+ chart_name: {
+ 'options': [None, title, units, family, 'smartd_log.' + chart_name, 'line'],
+ 'lines': []
+ }
+ }
+
+
+def handle_os_error(method):
+ def on_call(*args):
+ try:
+ return method(*args)
+ except OSError:
+ return None
+ return on_call
+
+
+class SmartAttribute(object):
+ def __init__(self, idx, normalized, raw):
+ self.id = idx
+ self.normalized = normalized
+ self._raw = raw
+
+ @property
+ def raw(self):
+ if self.id in LIMITS:
+ limit = LIMITS[self.id]
+ if limit.min <= int(self._raw) <= limit.max:
+ return self._raw
+ return None
+ return self._raw
+
+ @raw.setter
+ def raw(self, value):
+ self._raw = value
+
+
+class DiskLogFile:
+ def __init__(self, path):
+ self.path = path
+ self.size = os.path.getsize(path)
+
+ @handle_os_error
+ def is_changed(self):
+ new_size = os.path.getsize(self.path)
+ old_size, self.size = self.size, new_size
+
+ return new_size != old_size and new_size
+
+ @staticmethod
+ @handle_os_error
+ def is_valid(log_file, exclude):
+ return all([log_file.endswith('.csv'),
+ not [p for p in exclude if p in log_file],
+ os.access(log_file, os.R_OK),
+ os.path.getsize(log_file)])
+
+
+class Disk:
+ def __init__(self, full_path, age):
+ self.log_file = DiskLogFile(full_path)
+ self.name = os.path.basename(full_path).split('.')[-3]
+ self.age = int(age)
+ self.status = True
+ self.attributes = dict()
+
+ self.get_attributes()
+
+ def __eq__(self, other):
+ if isinstance(other, Disk):
+ return self.name == other.name
+ return self.name == other
+
+ @handle_os_error
+ def is_active(self):
+ return (time() - os.path.getmtime(self.log_file.path)) / 60 < self.age
+
+ @handle_os_error
+ def get_attributes(self):
+ last_line = read_last_line(self.log_file.path)
+ self.attributes = dict((attr, SmartAttribute(attr, normalized, raw)) for attr, normalized, raw
+ in REGEX.findall(last_line))
+ return True
+
+ def data(self):
+ data = dict()
+ for attr in self.attributes.values():
+ data['_'.join([self.name, 'normalized', attr.id])] = attr.normalized
+ if attr.raw is not None:
+ data['_'.join([self.name, 'raw', attr.id])] = attr.raw
+ return data
class Service(SimpleService):
def __init__(self, configuration=None, name=None):
SimpleService.__init__(self, configuration=configuration, name=name)
- self.regex = r_compile(r'(\d+);(\d+);(\d+)')
self.log_path = self.configuration.get('log_path', '/var/log/smartd')
- self.raw_values = self.configuration.get('raw_values')
- self.attr = self.configuration.get('smart_attributes', [])
- self.previous_data = dict()
+ self.raw = self.configuration.get('raw_values', True)
+ self.exclude = self.configuration.get('exclude_disks', str()).split()
+ self.age = self.configuration.get('age', 30)
+
+ self.runs = 0
+ self.disks = list()
+ self.order = list()
+ self.definitions = dict()
def check(self):
- # Can\'t start without smartd readable diks log files
- disks = find_disks_in_log_path(self.log_path)
- if not disks:
- self.error('Can\'t locate any smartd log files in %s' % self.log_path)
- return False
-
- # List of namedtuples to track smartd log file size
- self.disks = [NAMED_DISKS(name=disks[i], size=0, number=i) for i in range(len(disks))]
-
- if self._get_data():
- self.create_charts()
- return True
- else:
- self.error('Can\'t collect any data. Sorry.')
- return False
-
- def _get_raw_data(self, queue, disk):
- # The idea is to open a file.
- # Jump to the end.
- # Seek backward until '\n' symbol appears
- # If '\n' is found or it's the beginning of the file
- # readline()! (last or first line)
- with open(disk, 'rb') as f:
- f.seek(-2, 2)
- while f.read(1) != b'\n':
- f.seek(-2, 1)
- if f.tell() == 0:
- break
- result = f.readline()
-
- result = result.decode()
- result = self.regex.findall(result)
-
- queue.put([basename(disk), result])
-
- def _get_data(self):
- threads, result = list(), list()
- queue = Queue()
- to_netdata = dict()
-
- # If the size has not changed there is no reason to poll log files.
- disks = [disk for disk in self.disks if self.size_changed(disk)]
- if disks:
- for disk in disks:
- th = Thread(target=self._get_raw_data, args=(queue, disk.name))
- th.start()
- threads.append(th)
-
- for thread in threads:
- thread.join()
- result.append(queue.get())
+ self.disks = self.scan()
+
+ if not self.disks:
+ return None
+
+ user_defined_sa = self.configuration.get('smart_attributes')
+
+ if user_defined_sa:
+ order = user_defined_sa.split() or ORDER
else:
- # Data from last real poll
- return self.previous_data or None
+ order = ORDER
- for elem in result:
- for a, n, r in elem[1]:
- to_netdata.update({'_'.join([elem[0], a]): r if self.raw_values else n})
+ self.create_charts(order)
- self.previous_data.update(to_netdata)
+ return True
- return to_netdata or None
+ def get_data(self):
+ self.runs += 1
- def size_changed(self, disk):
- # We are not interested in log files:
- # 1. zero size
- # 2. size is not changed since last poll
- try:
- size = getsize(disk.name)
- if size != disk.size and size:
- self.disks[disk.number] = disk._replace(size=size)
- return True
- else:
- return False
- except OSError:
- # Remove unreadable/nonexisting log files from list of disks and previous_data
- self.disks.remove(disk)
- self.previous_data = dict([(k, v) for k, v in self.previous_data.items() if basename(disk.name) not in k])
- return False
+ if self.runs % RESCAN_INTERVAL == 0:
+ self.cleanup_and_rescan()
+
+ data = dict()
+
+ for disk in self.disks:
+
+ if not disk.status:
+ continue
+
+ changed = disk.log_file.is_changed()
+
+ # True = changed, False = unchanged, None = Exception
+ if changed is None:
+ disk.status = False
+ continue
+
+ if changed:
+ success = disk.get_attributes()
+ if not success:
+ disk.status = False
+ continue
+
+ data.update(disk.data())
+
+ return data or None
- def create_charts(self):
+ def create_charts(self, order):
+ for attr in order:
+ raw_name, normalized_name = 'attr_id_raw_' + attr, 'attr_id_normalized_' + attr
+ raw, normalized = chart_template(raw_name), chart_template(normalized_name)
+ self.order.extend([normalized_name, raw_name])
+ self.definitions.update(raw)
+ self.definitions.update(normalized)
- def create_lines(attrid):
- result = list()
for disk in self.disks:
- name = basename(disk.name)
- result.append(['_'.join([name, attrid]), name[:name.index('.')], 'absolute'])
- return result
+ if attr not in disk.attributes:
+ self.debug("'{disk}' has no attribute '{attr_id}'".format(disk=disk.name,
+ attr_id=attr))
+ continue
+ normalized[normalized_name]['lines'].append(['_'.join([disk.name, 'normalized', attr]), disk.name])
- # Use configured attributes, if present. If something goes wrong we don't care.
- order = ORDER
- try:
- order = [attr for attr in self.attr.split() if attr in SMART_ATTR.keys()] or ORDER
- except Exception:
- pass
- self.order = [''.join(['attrid', i]) for i in order]
- self.definitions = dict()
- units = 'raw' if self.raw_values else 'normalized'
-
- for k, v in dict([(k, v) for k, v in SMART_ATTR.items() if k in ORDER]).items():
- self.definitions.update({''.join(['attrid', k]): {
- 'options': [None, v, units, v.lower(), 'smartd.attrid' + k, 'line'],
- 'lines': create_lines(k)}})
-
-def find_disks_in_log_path(log_path):
- # smartd log file is OK if:
- # 1. it is a file
- # 2. file name endswith with 'csv'
- # 3. file is readable
- if not isdir(log_path): return None
- return [join(log_path, f) for f in listdir(log_path)
- if all([isfile(join(log_path, f)), f.endswith('.csv'), access(join(log_path, f), R_OK)])]
+ if not self.raw:
+ continue
+
+ if disk.attributes[attr].raw is not None:
+ raw[raw_name]['lines'].append(['_'.join([disk.name, 'raw', attr]), disk.name])
+ continue
+ self.debug("'{disk}' attribute '{attr_id}' value not in {limits}".format(disk=disk.name,
+ attr_id=attr,
+ limits=LIMITS[attr]))
+
+ def cleanup_and_rescan(self):
+ self.cleanup()
+ new_disks = self.scan(only_new=True)
+
+ for disk in new_disks:
+ valid = False
+
+ for chart in self.charts:
+ value_type, idx = chart.id.split('_')[2:]
+
+ if idx in disk.attributes:
+ valid = True
+ dimension_id = '_'.join([disk.name, value_type, idx])
+
+ if dimension_id in chart:
+ chart.hide_dimension(dimension_id=dimension_id, reverse=True)
+ else:
+ chart.add_dimension([dimension_id, disk.name])
+ if valid:
+ self.disks.append(disk)
+
+ def cleanup(self):
+ for disk in self.disks:
+
+ if not disk.is_active():
+ disk.status = False
+ if not disk.status:
+ for chart in self.charts:
+ dimension_id = '_'.join([disk.name, chart.id[8:]])
+ chart.hide_dimension(dimension_id=dimension_id)
+
+ self.disks = [disk for disk in self.disks if disk.status]
+
+ def scan(self, only_new=None):
+ new_disks = list()
+ for f in os.listdir(self.log_path):
+ full_path = os.path.join(self.log_path, f)
+
+ if DiskLogFile.is_valid(full_path, self.exclude):
+ disk = Disk(full_path, self.age)
+
+ active = disk.is_active()
+ if active is None:
+ continue
+ if active:
+ if not only_new:
+ new_disks.append(disk)
+ else:
+ if disk not in self.disks:
+ new_disks.append(disk)
+ else:
+ if not only_new:
+ self.debug("'{disk}' not updated in the last {age} minutes, "
+ "skipping it.".format(disk=disk.name, age=self.age))
+ return new_disks
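chart_template() derives the title, units and family from the generated chart id alone; for attribute 194 ('Temperature' in SMART_ATTR) it evaluates to the following, with one ['<disk>_raw_194', '<disk>'] line appended per disk by create_charts():

    chart_template('attr_id_raw_194')
    # {'attr_id_raw_194': {
    #     'options': [None, 'Raw Temperature', 'raw', 'temperature',
    #                 'smartd_log.attr_id_raw_194', 'line'],
    #     'lines': []}}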
diff --git a/python.d/squid.chart.py b/python.d/squid.chart.py
index e9e8f1d0..ba8f982f 100644
--- a/python.d/squid.chart.py
+++ b/python.d/squid.chart.py
@@ -2,8 +2,8 @@
# Description: squid netdata python.d module
# Author: Pawel Krupa (paulfantom)
-from base import SocketService
-import select
+from bases.FrameworkServices.SocketService import SocketService
+
# default module values (can be overridden per job in `config`)
# update_every = 2
@@ -60,7 +60,7 @@ class Service(SocketService):
"""
response = self._get_raw_data()
- data = {}
+ data = dict()
try:
raw = ""
for tmp in response.split('\r\n'):
@@ -81,11 +81,10 @@ class Service(SocketService):
self.error("invalid data received")
return None
- if len(data) == 0:
+ if not data:
self.error("no data received")
return None
- else:
- return data
+ return data
def _check_raw_data(self, data):
header = data[:1024].lower()
diff --git a/python.d/tomcat.chart.py b/python.d/tomcat.chart.py
index 05547236..a570d564 100644
--- a/python.d/tomcat.chart.py
+++ b/python.d/tomcat.chart.py
@@ -2,9 +2,10 @@
# Description: tomcat netdata python.d module
# Author: Pawel Krupa (paulfantom)
-from base import UrlService
import xml.etree.ElementTree as ET
+from bases.FrameworkServices.UrlService import UrlService
+
# default module values (can be overridden per job in `config`)
# update_every = 2
priority = 60000
@@ -71,6 +72,7 @@ CHARTS = {
]},
}
+
class Service(UrlService):
def __init__(self, configuration=None, name=None):
UrlService.__init__(self, configuration=configuration, name=name)
@@ -87,7 +89,6 @@ class Service(UrlService):
data = None
raw_data = self._get_raw_data()
if raw_data:
- xml = None
try:
xml = ET.fromstring(raw_data)
except ET.ParseError:
@@ -100,27 +101,27 @@ class Service(UrlService):
connector = None
if self.connector_name:
for conn in xml.findall('connector'):
- if conn.get('name') == self.connector_name:
+ if self.connector_name in conn.get('name'):
connector = conn
break
else:
connector = xml.find('connector')
memory = jvm.find('memory')
- data['free'] = memory.get('free')
+ data['free'] = memory.get('free')
data['total'] = memory.get('total')
for pool in jvm.findall('memorypool'):
name = pool.get('name')
- if name == 'Eden Space':
+ if 'Eden Space' in name:
data['eden_used'] = pool.get('usageUsed')
data['eden_commited'] = pool.get('usageCommitted')
data['eden_max'] = pool.get('usageMax')
- elif name == 'Survivor Space':
+ elif 'Survivor Space' in name:
data['survivor_used'] = pool.get('usageUsed')
data['survivor_commited'] = pool.get('usageCommitted')
data['survivor_max'] = pool.get('usageMax')
- elif name == 'Tenured Gen':
+ elif 'Tenured Gen' in name or 'Old Gen' in name:
data['tenured_used'] = pool.get('usageUsed')
data['tenured_commited'] = pool.get('usageCommitted')
data['tenured_max'] = pool.get('usageMax')
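The tomcat hunks replace exact memory-pool name matches with substring checks, so both the classic 'Tenured Gen' pool and the 'Old Gen' pools reported by newer collectors (e.g. G1) feed the same dimensions. A quick illustration against a made-up status fragment; the element and attribute names are inferred from the code above, not from Tomcat documentation:

    import xml.etree.ElementTree as ET

    SAMPLE = """<status><jvm>
      <memory free='100' total='400'/>
      <memorypool name='G1 Old Gen' usageUsed='1' usageCommitted='2' usageMax='3'/>
    </jvm></status>"""

    xml = ET.fromstring(SAMPLE)
    for pool in xml.find('jvm').findall('memorypool'):
        name = pool.get('name')
        # 'in' matches 'Tenured Gen' (CMS/serial GC) and 'Old Gen' (G1) alike
        if 'Tenured Gen' in name or 'Old Gen' in name:
            print(pool.get('usageUsed'), pool.get('usageMax'))  # prints: 1 3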
diff --git a/python.d/varnish.chart.py b/python.d/varnish.chart.py
index 2665bb38..d8145c0b 100644
--- a/python.d/varnish.chart.py
+++ b/python.d/varnish.chart.py
@@ -2,230 +2,240 @@
# Description: varnish netdata python.d module
# Author: l2isbad
-from base import SimpleService
-from re import compile
-from subprocess import Popen, PIPE
+import re
+
+from bases.collection import find_binary
+from bases.FrameworkServices.ExecutableService import ExecutableService
# default module values (can be overridden per job in `config`)
# update_every = 2
priority = 60000
retries = 60
-ORDER = ['session', 'hit_rate', 'chit_rate', 'expunge', 'threads', 'backend_health', 'memory_usage', 'bad', 'uptime']
-
-CHARTS = {'backend_health':
- {'lines': [['backend_conn', 'conn', 'incremental', 1, 1],
- ['backend_unhealthy', 'unhealthy', 'incremental', 1, 1],
- ['backend_busy', 'busy', 'incremental', 1, 1],
- ['backend_fail', 'fail', 'incremental', 1, 1],
- ['backend_reuse', 'reuse', 'incremental', 1, 1],
- ['backend_recycle', 'resycle', 'incremental', 1, 1],
- ['backend_toolate', 'toolate', 'incremental', 1, 1],
- ['backend_retry', 'retry', 'incremental', 1, 1],
- ['backend_req', 'req', 'incremental', 1, 1]],
- 'options': [None, 'Backend health', 'connections', 'Backend health', 'varnish.backend_traf', 'line']},
- 'bad':
- {'lines': [['sess_drop_b', None, 'incremental', 1, 1],
- ['backend_unhealthy_b', None, 'incremental', 1, 1],
- ['fetch_failed', None, 'incremental', 1, 1],
- ['backend_busy_b', None, 'incremental', 1, 1],
- ['threads_failed_b', None, 'incremental', 1, 1],
- ['threads_limited_b', None, 'incremental', 1, 1],
- ['threads_destroyed_b', None, 'incremental', 1, 1],
- ['thread_queue_len_b', 'queue_len', 'absolute', 1, 1],
- ['losthdr_b', None, 'incremental', 1, 1],
- ['esi_errors_b', None, 'incremental', 1, 1],
- ['esi_warnings_b', None, 'incremental', 1, 1],
- ['sess_fail_b', None, 'incremental', 1, 1],
- ['sc_pipe_overflow_b', None, 'incremental', 1, 1],
- ['sess_pipe_overflow_b', None, 'incremental', 1, 1]],
- 'options': [None, 'Misbehavior', 'problems', 'Problems summary', 'varnish.bad', 'line']},
- 'expunge':
- {'lines': [['n_expired', 'expired', 'incremental', 1, 1],
- ['n_lru_nuked', 'lru_nuked', 'incremental', 1, 1]],
- 'options': [None, 'Object expunging', 'objects', 'Cache performance', 'varnish.expunge', 'line']},
- 'hit_rate':
- {'lines': [['cache_hit_perc', 'hit', 'absolute', 1, 100],
- ['cache_miss_perc', 'miss', 'absolute', 1, 100],
- ['cache_hitpass_perc', 'hitpass', 'absolute', 1, 100]],
- 'options': [None, 'All history hit rate ratio','percent', 'Cache performance', 'varnish.hit_rate', 'stacked']},
- 'chit_rate':
- {'lines': [['cache_hit_cperc', 'hit', 'absolute', 1, 100],
- ['cache_miss_cperc', 'miss', 'absolute', 1, 100],
- ['cache_hitpass_cperc', 'hitpass', 'absolute', 1, 100]],
- 'options': [None, 'Current poll hit rate ratio','percent', 'Cache performance', 'varnish.chit_rate', 'stacked']},
- 'memory_usage':
- {'lines': [['s0.g_space', 'available', 'absolute', 1, 1048576],
- ['s0.g_bytes', 'allocated', 'absolute', -1, 1048576]],
- 'options': [None, 'Memory usage', 'megabytes', 'Memory usage', 'varnish.memory_usage', 'stacked']},
- 'session':
- {'lines': [['sess_conn', 'sess_conn', 'incremental', 1, 1],
- ['client_req', 'client_requests', 'incremental', 1, 1],
- ['client_conn', 'client_conn', 'incremental', 1, 1],
- ['client_drop', 'client_drop', 'incremental', 1, 1],
- ['sess_dropped', 'sess_dropped', 'incremental', 1, 1]],
- 'options': [None, 'Sessions', 'units', 'Client metrics', 'varnish.session', 'line']},
- 'threads':
- {'lines': [['threads', None, 'absolute', 1, 1],
- ['threads_created', 'created', 'incremental', 1, 1],
- ['threads_failed', 'failed', 'incremental', 1, 1],
- ['threads_limited', 'limited', 'incremental', 1, 1],
- ['thread_queue_len', 'queue_len', 'incremental', 1, 1],
- ['sess_queued', 'sess_queued', 'incremental', 1, 1]],
- 'options': [None, 'Thread status', 'threads', 'Thread-related metrics', 'varnish.threads', 'line']},
- 'uptime':
- {'lines': [['uptime', None, 'absolute', 1, 1]],
- 'options': [None, 'Varnish uptime', 'seconds', 'Uptime', 'varnish.uptime', 'line']}
+ORDER = ['session_connections', 'client_requests',
+ 'all_time_hit_rate', 'current_poll_hit_rate', 'cached_objects_expired', 'cached_objects_nuked',
+ 'threads_total', 'threads_statistics', 'threads_queue_len',
+ 'backend_connections', 'backend_requests',
+ 'esi_statistics',
+ 'memory_usage',
+ 'uptime']
+
+CHARTS = {
+ 'session_connections': {
+ 'options': [None, 'Connections Statistics', 'connections/s',
+ 'client metrics', 'varnish.session_connection', 'line'],
+ 'lines': [
+ ['sess_conn', 'accepted', 'incremental'],
+ ['sess_dropped', 'dropped', 'incremental']
+ ]
+ },
+ 'client_requests': {
+ 'options': [None, 'Client Requests', 'requests/s',
+ 'client metrics', 'varnish.client_requests', 'line'],
+ 'lines': [
+ ['client_req', 'received', 'incremental']
+ ]
+ },
+ 'all_time_hit_rate': {
+ 'options': [None, 'All History Hit Rate Ratio', 'percent', 'cache performance',
+ 'varnish.all_time_hit_rate', 'stacked'],
+ 'lines': [
+ ['cache_hit', 'hit', 'percentage-of-absolute-row'],
+ ['cache_miss', 'miss', 'percentage-of-absolute-row'],
+ ['cache_hitpass', 'hitpass', 'percentage-of-absolute-row']]
+ },
+ 'current_poll_hit_rate': {
+ 'options': [None, 'Current Poll Hit Rate Ratio', 'percent', 'cache performance',
+ 'varnish.current_poll_hit_rate', 'stacked'],
+ 'lines': [
+ ['cache_hit', 'hit', 'percentage-of-incremental-row'],
+ ['cache_miss', 'miss', 'percentage-of-incremental-row'],
+ ['cache_hitpass', 'hitpass', 'percentage-of-incremental-row']
+ ]
+ },
+ 'cached_objects_expired': {
+ 'options': [None, 'Expired Objects', 'expired/s', 'cache performance',
+ 'varnish.cached_objects_expired', 'line'],
+ 'lines': [
+ ['n_expired', 'objects', 'incremental']
+ ]
+ },
+ 'cached_objects_nuked': {
+ 'options': [None, 'Least Recently Used Nuked Objects', 'nuked/s', 'cache performance',
+ 'varnish.cached_objects_nuked', 'line'],
+ 'lines': [
+ ['n_lru_nuked', 'objects', 'incremental']
+ ]
+ },
+ 'threads_total': {
+ 'options': [None, 'Number Of Threads In All Pools', 'number', 'thread related metrics',
+ 'varnish.threads_total', 'line'],
+ 'lines': [
+ ['threads', None, 'absolute']
+ ]
+ },
+ 'threads_statistics': {
+ 'options': [None, 'Threads Statistics', 'threads/s', 'thread related metrics',
+ 'varnish.threads_statistics', 'line'],
+ 'lines': [
+ ['threads_created', 'created', 'incremental'],
+ ['threads_failed', 'failed', 'incremental'],
+ ['threads_limited', 'limited', 'incremental']
+ ]
+ },
+ 'threads_queue_len': {
+ 'options': [None, 'Current Queue Length', 'requests', 'thread related metrics',
+ 'varnish.threads_queue_len', 'line'],
+ 'lines': [
+ ['thread_queue_len', 'in queue']
+ ]
+ },
+ 'backend_connections': {
+ 'options': [None, 'Backend Connections Statistics', 'connections/s', 'backend metrics',
+ 'varnish.backend_connections', 'line'],
+ 'lines': [
+ ['backend_conn', 'successful', 'incremental'],
+ ['backend_unhealthy', 'unhealthy', 'incremental'],
+ ['backend_reuse', 'reused', 'incremental'],
+ ['backend_toolate', 'closed', 'incremental'],
+ ['backend_recycle', 'recycled', 'incremental'],

+ ['backend_fail', 'failed', 'incremental']
+ ]
+ },
+ 'backend_requests': {
+ 'options': [None, 'Requests To The Backend', 'requests/s', 'backend metrics',
+ 'varnish.backend_requests', 'line'],
+ 'lines': [
+ ['backend_req', 'sent', 'incremental']
+ ]
+ },
+ 'esi_statistics': {
+ 'options': [None, 'ESI Statistics', 'problems/s', 'esi related metrics', 'varnish.esi_statistics', 'line'],
+ 'lines': [
+ ['esi_errors', 'errors', 'incremental'],
+ ['esi_warnings', 'warnings', 'incremental']
+ ]
+ },
+ 'memory_usage': {
+ 'options': [None, 'Memory Usage', 'MB', 'memory usage', 'varnish.memory_usage', 'stacked'],
+ 'lines': [
+ ['memory_free', 'free', 'absolute', 1, 1 << 20],
+ ['memory_allocated', 'allocated', 'absolute', 1, 1 << 20]]
+ },
+ 'uptime': {
+ 'lines': [
+ ['uptime', None, 'absolute']
+ ],
+ 'options': [None, 'Uptime', 'seconds', 'uptime', 'varnish.uptime', 'line']
+ }
}
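The rewritten CHARTS dict follows the chart definition format documented in python.d/README.md: 'options' is [name, title, units, family, context, chart_type] and each entry in 'lines' is [dimension_id, name, algorithm, multiplier, divisor], with trailing fields optional (the 'thread_queue_len' dimension above, for instance, falls back to the 'absolute' default). A skeletal example of the shape:

    EXAMPLE_CHART = {
        'example_chart': {
            'options': [None, 'Example Title', 'units/s', 'example family',
                        'varnish.example', 'line'],
            'lines': [
                # dimension id, display name, algorithm, multiplier, divisor
                ['some_counter', 'shown name', 'incremental', 1, 1]
            ]
        }
    }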
-class Service(SimpleService):
+class Parser:
+ _backend_new = re.compile(r'VBE.([\d\w_.]+)\(.*?\).(beresp[\w_]+)\s+(\d+)')
+ _backend_old = re.compile(r'VBE\.[\d\w-]+\.([\w\d_]+).(beresp[\w_]+)\s+(\d+)')
+ _default = re.compile(r'([A-Z]+\.)?([\d\w_.]+)\s+(\d+)')
+
+ def __init__(self):
+ self.re_default = None
+ self.re_backend = None
+
+ def init(self, data):
+ data = ''.join(data)
+ parsed_main = Parser._default.findall(data)
+ if parsed_main:
+ self.re_default = Parser._default
+
+ parsed_backend = Parser._backend_new.findall(data)
+ if parsed_backend:
+ self.re_backend = Parser._backend_new
+ else:
+ parsed_backend = Parser._backend_old.findall(data)
+ if parsed_backend:
+ self.re_backend = Parser._backend_old
+
+ def server_stats(self, data):
+ return self.re_default.findall(''.join(data))
+
+ def backend_stats(self, data):
+ return self.re_backend.findall(''.join(data))
+
+
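Parser.init() probes the sample output once and remembers which VBE regex the running Varnish speaks, since the backend line format changed between releases (see the removed comment further down for both shapes). A hedged demo on a synthetic line, given the Parser class above:

    p = Parser()
    line = 'VBE.default(127.0.0.1,,8080).beresp_hdrbytes          1024'
    p.init(line)
    assert p.re_backend is Parser._backend_new
    print(p.backend_stats(line))  # [('default', 'beresp_hdrbytes', '1024')]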
+class Service(ExecutableService):
def __init__(self, configuration=None, name=None):
- SimpleService.__init__(self, configuration=configuration, name=name)
- self.varnish = self.find_binary('varnishstat')
- self.rgx_all = compile(r'([A-Z]+\.)?([\d\w_.]+)\s+(\d+)')
- # Could be
- # VBE.boot.super_backend.pipe_hdrbyte (new)
- # or
- # VBE.default2(127.0.0.2,,81).bereq_bodybytes (old)
- # Regex result: [('super_backend', 'beresp_hdrbytes', '0'), ('super_backend', 'beresp_bodybytes', '0')]
- self.rgx_bck = (compile(r'VBE.([\d\w_.]+)\(.*?\).(beresp[\w_]+)\s+(\d+)'),
- compile(r'VBE\.[\d\w-]+\.([\w\d_]+).(beresp[\w_]+)\s+(\d+)'))
- self.cache_prev = list()
+ ExecutableService.__init__(self, configuration=configuration, name=name)
+ self.order = ORDER
+ self.definitions = CHARTS
+ varnishstat = find_binary('varnishstat')
+ self.command = [varnishstat, '-1'] if varnishstat else None
+ self.parser = Parser()
def check(self):
- # Cant start without 'varnishstat' command
- if not self.varnish:
- self.error('Can\'t locate \'varnishstat\' binary or binary is not executable by netdata')
+ if not self.command:
+ self.error("Can't locate 'varnishstat' binary or binary is not executable by user netdata")
return False
- # If command is present and we can execute it we need to make sure..
- # 1. STDOUT is not empty
+ # STDOUT is not empty
reply = self._get_raw_data()
if not reply:
- self.error('No output from \'varnishstat\' (not enough privileges?)')
- return False
-
- # 2. Output is parsable (list is not empty after regex findall)
- is_parsable = self.rgx_all.findall(reply)
- if not is_parsable:
- self.error('Cant parse output...')
+ self.error("No output from 'varnishstat'. Not enough privileges?")
return False
- # We need to find the right regex for backend parse
- self.backend_list = self.rgx_bck[0].findall(reply)[::2]
- if self.backend_list:
- self.rgx_bck = self.rgx_bck[0]
- else:
- self.backend_list = self.rgx_bck[1].findall(reply)[::2]
- self.rgx_bck = self.rgx_bck[1]
+ self.parser.init(reply)
- # We are about to start!
- self.create_charts()
+ # Output is parsable
+ if not self.parser.re_default:
+ self.error("Can't parse the output...")
+ return False
- self.info('Plugin was started successfully')
+ if self.parser.re_backend:
+ backends = [b[0] for b in self.parser.backend_stats(reply)[::2]]
+ self.create_backends_charts(backends)
return True
-
- def _get_raw_data(self):
- try:
- reply = Popen([self.varnish, '-1'], stdout=PIPE, stderr=PIPE, shell=False)
- except OSError:
- return None
-
- raw_data = reply.communicate()[0]
- if not raw_data:
- return None
-
- return raw_data.decode()
-
- def _get_data(self):
+ def get_data(self):
"""
Format data received from shell command
:return: dict
"""
- raw_data = self._get_raw_data()
- data_all = self.rgx_all.findall(raw_data)
- data_backend = self.rgx_bck.findall(raw_data)
+ raw = self._get_raw_data()
+ if not raw:
+ return None
- if not data_all:
+ data = dict()
+ server_stats = self.parser.server_stats(raw)
+ if not server_stats:
return None
- # 1. ALL data from 'varnishstat -1'. t - type(MAIN, MEMPOOL etc)
- to_netdata = dict([(k, int(v)) for t, k, v in data_all])
-
- # 2. ADD backend statistics
- to_netdata.update(dict([('_'.join([n, k]), int(v)) for n, k, v in data_backend]))
-
- # 3. ADD additional keys to dict
- # 3.1 Cache hit/miss/hitpass OVERALL in percent
- cache_summary = sum([to_netdata.get('cache_hit', 0), to_netdata.get('cache_miss', 0),
- to_netdata.get('cache_hitpass', 0)])
- to_netdata['cache_hit_perc'] = find_percent(to_netdata.get('cache_hit', 0), cache_summary, 10000)
- to_netdata['cache_miss_perc'] = find_percent(to_netdata.get('cache_miss', 0), cache_summary, 10000)
- to_netdata['cache_hitpass_perc'] = find_percent(to_netdata.get('cache_hitpass', 0), cache_summary, 10000)
-
- # 3.2 Cache hit/miss/hitpass CURRENT in percent
- if self.cache_prev:
- cache_summary = sum([to_netdata.get('cache_hit', 0), to_netdata.get('cache_miss', 0),
- to_netdata.get('cache_hitpass', 0)]) - sum(self.cache_prev)
- to_netdata['cache_hit_cperc'] = find_percent(to_netdata.get('cache_hit', 0) - self.cache_prev[0], cache_summary, 10000)
- to_netdata['cache_miss_cperc'] = find_percent(to_netdata.get('cache_miss', 0) - self.cache_prev[1], cache_summary, 10000)
- to_netdata['cache_hitpass_cperc'] = find_percent(to_netdata.get('cache_hitpass', 0) - self.cache_prev[2], cache_summary, 10000)
- else:
- to_netdata['cache_hit_cperc'] = 0
- to_netdata['cache_miss_cperc'] = 0
- to_netdata['cache_hitpass_cperc'] = 0
-
- self.cache_prev = [to_netdata.get('cache_hit', 0), to_netdata.get('cache_miss', 0), to_netdata.get('cache_hitpass', 0)]
-
- # 3.3 Problems summary chart
- for elem in ['backend_busy', 'backend_unhealthy', 'esi_errors', 'esi_warnings', 'losthdr', 'sess_drop', 'sc_pipe_overflow',
- 'sess_fail', 'sess_pipe_overflow', 'threads_destroyed', 'threads_failed', 'threads_limited', 'thread_queue_len']:
- if to_netdata.get(elem) is not None:
- to_netdata[''.join([elem, '_b'])] = to_netdata.get(elem)
-
- # Ready steady go!
- return to_netdata
-
- def create_charts(self):
- # If 'all_charts' is true...ALL charts are displayed. If no only default + 'extra_charts'
- #if self.configuration.get('all_charts'):
- # self.order = EXTRA_ORDER
- #else:
- # try:
- # extra_charts = list(filter(lambda chart: chart in EXTRA_ORDER, self.extra_charts.split()))
- # except (AttributeError, NameError, ValueError):
- # self.error('Extra charts disabled.')
- # extra_charts = []
-
- self.order = ORDER[:]
- #self.order.extend(extra_charts)
-
- # Create static charts
- #self.definitions = {chart: values for chart, values in CHARTS.items() if chart in self.order}
- self.definitions = CHARTS
-
- # Create dynamic backend charts
- if self.backend_list:
- for backend in self.backend_list:
- self.order.insert(0, ''.join([backend[0], '_resp_stats']))
- self.definitions.update({''.join([backend[0], '_resp_stats']): {
- 'options': [None,
- '%s response statistics' % backend[0].capitalize(),
- "kilobit/s",
- 'Backend response',
- 'varnish.backend',
- 'area'],
- 'lines': [[''.join([backend[0], '_beresp_hdrbytes']),
- 'header', 'incremental', 8, 1000],
- [''.join([backend[0], '_beresp_bodybytes']),
- 'body', 'incremental', -8, 1000]]}})
-
-
-def find_percent(value1, value2, multiply):
- # If value2 is 0 return 0
- if not value2:
- return 0
- else:
- return round(float(value1) / float(value2) * multiply)
+ if self.parser.re_backend:
+ backend_stats = self.parser.backend_stats(raw)
+ data.update(dict(('_'.join([name, param]), value) for name, param, value in backend_stats))
+
+ data.update(dict((param, value) for _, param, value in server_stats))
+
+ data['memory_allocated'] = data['s0.g_bytes']
+ data['memory_free'] = data['s0.g_space']
+
+ return data
+
+ def create_backends_charts(self, backends):
+ for backend in backends:
+ chart_name = ''.join([backend, '_response_statistics'])
+ title = 'Backend "{0}"'.format(backend.capitalize())
+ hdr_bytes = ''.join([backend, '_beresp_hdrbytes'])
+ body_bytes = ''.join([backend, '_beresp_bodybytes'])
+
+ chart = {
+ chart_name:
+ {
+ 'options': [None, title, 'kilobits/s', 'backend response statistics',
+ 'varnish.backend', 'area'],
+ 'lines': [
+ [hdr_bytes, 'header', 'incremental', 8, 1000],
+ [body_bytes, 'body', 'incremental', -8, 1000]
+ ]
+ }
+ }
+
+ self.order.insert(0, chart_name)
+ self.definitions.update(chart)
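The old module computed hit-rate percentages by hand (the removed find_percent bookkeeping); the rewrite instead declares the 'percentage-of-absolute-row' and 'percentage-of-incremental-row' algorithms and lets the charting framework do the math. Informally, and inferred from the algorithm names plus the code they replace, each dimension becomes its share of the row total, taken over either the absolute values or the per-poll deltas:

    def row_percentages(values):
        total = sum(values)
        if not total:
            return [0.0 for _ in values]
        return [round(100.0 * v / total, 2) for v in values]

    # cache_hit, cache_miss, cache_hitpass at one poll
    print(row_percentages([900, 90, 10]))  # [90.0, 9.0, 1.0]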
diff --git a/python.d/web_log.chart.py b/python.d/web_log.chart.py
index a5359bc4..954ecd41 100644
--- a/python.d/web_log.chart.py
+++ b/python.d/web_log.chart.py
@@ -4,22 +4,20 @@
import bisect
import re
+import os
from collections import namedtuple, defaultdict
from copy import deepcopy
-from os import access, R_OK
-from os.path import getsize
try:
from itertools import filterfalse
except ImportError:
+ from itertools import ifilter as filter
from itertools import ifilterfalse as filterfalse
-from base import LogService
-import msg
+from bases.collection import read_last_line
+from bases.FrameworkServices.LogService import LogService
-priority = 60000
-retries = 60
ORDER_APACHE_CACHE = ['apache_cache']
@@ -246,6 +244,8 @@ SQUID_CODES = dict(TCP='squid_transport_methods', UDP='squid_transport_methods',
DENIED='squid_cache_events', NOFETCH='squid_cache_events', TUNNEL='squid_cache_events',
ABORTED='squid_transport_errors', TIMEOUT='squid_transport_errors')
+REQUEST_REGEX = re.compile(r'(?P<method>[A-Z]+) (?P<url>[^ ]+) [A-Z]+/(?P<http_version>\d(?:\.\d)?)')
+
class Service(LogService):
def __init__(self, configuration=None, name=None):
@@ -254,8 +254,9 @@ class Service(LogService):
:param name:
"""
LogService.__init__(self, configuration=configuration, name=name)
- self.log_type = self.configuration.get('type', 'web')
+ self.configuration = configuration
self.log_path = self.configuration.get('path')
+ self.job = None
def check(self):
"""
@@ -267,123 +268,43 @@ class Service(LogService):
4. other checks depends on log "type"
"""
+ log_type = self.configuration.get('type', 'web')
log_types = dict(web=Web, apache_cache=ApacheCache, squid=Squid)
- if self.log_type not in log_types:
- self.error('bad log type (%s). Supported types: %s' % (self.log_type, log_types.keys()))
+ if log_type not in log_types:
+ self.error("bad log type {log_type}. Supported types: {types}".format(log_type=log_type,
+ types=log_types.keys()))
return False
if not self.log_path:
self.error('log path is not specified')
return False
- if not (self._find_recent_log_file() and access(self.log_path, R_OK)):
- self.error('%s not readable or not exist' % self.log_path)
+ if not (self._find_recent_log_file() and os.access(self.log_path, os.R_OK)):
+ self.error('{log_file} not readable or does not exist'.format(log_file=self.log_path))
return False
- if not getsize(self.log_path):
- self.error('%s is empty' % self.log_path)
+ if not os.path.getsize(self.log_path):
+ self.error('{log_file} is empty'.format(log_file=self.log_path))
return False
- self.configuration['update_every'] = self.update_every
- self.configuration['name'] = self.name
- self.configuration['override_name'] = self.override_name
- self.configuration['_dimensions'] = self._dimensions
- self.configuration['path'] = self.log_path
-
- cls = log_types[self.log_type]
- self.Job = cls(configuration=self.configuration)
- if self.Job.check():
- self.order = self.Job.order
- self.definitions = self.Job.definitions
- self.info('Current log file: %s' % self.log_path)
+ self.job = log_types[log_type](self)
+ if self.job.check():
+ self.order = self.job.order
+ self.definitions = self.job.definitions
return True
return False
def _get_data(self):
- return self.Job.get_data(self._get_raw_data())
-
-
-class Mixin:
- def filter_data(self, raw_data):
- """
- :param raw_data: list
- :return:
- """
- if not self.pre_filter:
- return raw_data
- filtered = raw_data
- for elem in self.pre_filter:
- if elem.description == 'filter_include':
- filtered = filter(elem.func, filtered)
- elif elem.description == 'filter_exclude':
- filtered = filterfalse(elem.func, filtered)
- return filtered
-
- def add_new_dimension(self, dimension_id, chart_key, dimension=None,
- algorithm='incremental', multiplier=1, divisor=1):
- """
- :param dimension:
- :param chart_key:
- :param dimension_id:
- :param algorithm:
- :param multiplier:
- :param divisor:
- :return:
- """
-
- self.data[dimension_id] = 0
- # SET method check if dim in _dimensions
- self.conf['_dimensions'].append(dimension_id)
- # UPDATE method do SET only if dim in definitions
- dimension_list = list(map(str, [dimension_id,
- dimension if dimension else dimension_id,
- algorithm,
- multiplier,
- divisor]))
- self.definitions[chart_key]['lines'].append(dimension_list)
- job_name = find_job_name(self.conf['override_name'], self.conf['name'])
- opts = self.definitions[chart_key]['options']
- chart = 'CHART %s.%s "" "%s" %s "%s" %s %s 60000 %s\n' % (job_name, chart_key,
- opts[1], opts[2], opts[3],
- opts[4], opts[5], self.conf['update_every'])
- print(chart + "DIMENSION %s\n" % ' '.join(dimension_list))
-
- def get_last_line(self):
- """
- Reads last line from the log file
- :return: str:
- """
- # Read last line (or first if there is only one line)
- with open(self.conf['path'], 'rb') as logs:
- logs.seek(-2, 2)
- while logs.read(1) != b'\n':
- logs.seek(-2, 1)
- if logs.tell() == 0:
- break
- last_line = logs.readline()
- try:
- return last_line.decode()
- except UnicodeDecodeError:
- try:
- return last_line.decode(encoding='utf-8')
- except (TypeError, UnicodeDecodeError) as error:
- msg.error('web_log', str(error))
- return False
-
- @staticmethod
- def error(*params):
- msg.error('web_log', ' '.join(map(str, params)))
-
- @staticmethod
- def info(*params):
- msg.info('web_log', ' '.join(map(str, params)))
+ return self.job.get_data(self._get_raw_data())
-class Web(Mixin):
- def __init__(self, configuration):
- self.conf = configuration
- self.pre_filter = check_patterns('filter', self.conf.get('filter'))
+class Web:
+ def __init__(self, service):
+ self.service = service
+ self.order = ORDER_WEB[:]
+ self.definitions = deepcopy(CHARTS_WEB)
+ self.pre_filter = check_patterns('filter', self.configuration.get('filter'))
self.storage = dict()
self.data = {'bytes_sent': 0, 'resp_length': 0, 'resp_time_min': 0, 'resp_time_max': 0,
'resp_time_avg': 0, 'resp_time_upstream_min': 0, 'resp_time_upstream_max': 0,
@@ -392,23 +313,27 @@ class Web(Mixin):
'req_ipv6': 0, 'unique_tot_ipv4': 0, 'unique_tot_ipv6': 0, 'successful_requests': 0,
'redirects': 0, 'bad_requests': 0, 'server_errors': 0, 'other_requests': 0, 'GET': 0}
+ def __getattr__(self, item):
+ return getattr(self.service, item)
+
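Web (like Squid and ApacheCache below) no longer inherits from a Mixin; it wraps the Service and forwards unknown attribute lookups to it, so self.error, self.charts and self.configuration all resolve on the service object. A minimal sketch of the idiom:

    class Owner:
        def error(self, msg):
            print('error:', msg)

    class Delegate:
        def __init__(self, service):
            self.service = service

        def __getattr__(self, item):
            # only invoked when normal lookup fails on Delegate itself
            return getattr(self.service, item)

    Delegate(Owner()).error('boom')  # -> error: boom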
def check(self):
- last_line = self.get_last_line()
+ last_line = read_last_line(self.log_path)
if not last_line:
return False
# Custom_log_format or predefined log format.
- if self.conf.get('custom_log_format'):
+ if self.configuration.get('custom_log_format'):
match_dict, error = self.find_regex_custom(last_line)
else:
match_dict, error = self.find_regex(last_line)
# "match_dict" is None if there are any problems
if match_dict is None:
- self.error(str(error))
+ self.error(error)
return False
+
self.storage['unique_all_time'] = list()
- self.storage['url_pattern'] = check_patterns('url_pattern', self.conf.get('categories'))
- self.storage['user_pattern'] = check_patterns('user_pattern', self.conf.get('user_defined'))
+ self.storage['url_pattern'] = check_patterns('url_pattern', self.configuration.get('categories'))
+ self.storage['user_pattern'] = check_patterns('user_pattern', self.configuration.get('user_defined'))
self.create_web_charts(match_dict) # Create charts
self.info('Collected data: %s' % list(match_dict.keys()))
@@ -420,20 +345,21 @@ class Web(Mixin):
:return:
Create/remove additional charts depending on the 'match_dict' keys and configuration file options
"""
- self.order = ORDER_WEB[:]
- self.definitions = deepcopy(CHARTS_WEB)
-
if 'resp_time' not in match_dict:
self.order.remove('response_time')
if 'resp_time_upstream' not in match_dict:
self.order.remove('response_time_upstream')
- if not self.conf.get('all_time', True):
+ if not self.configuration.get('all_time', True):
self.order.remove('clients_all')
# Add 'detailed_response_codes' chart if specified in the configuration
- if self.conf.get('detailed_response_codes', True):
- codes = DET_RESP_AGGR[:1] if self.conf.get('detailed_response_aggregate', True) else DET_RESP_AGGR[1:]
+ if self.configuration.get('detailed_response_codes', True):
+ if self.configuration.get('detailed_response_aggregate', True):
+ codes = DET_RESP_AGGR[:1]
+ else:
+ codes = DET_RESP_AGGR[1:]
+
for code in codes:
self.order.append('detailed_response_codes%s' % code)
self.definitions['detailed_response_codes%s' % code] \
@@ -444,9 +370,8 @@ class Web(Mixin):
# Add 'requests_per_url' chart if specified in the configuration
if self.storage['url_pattern']:
for elem in self.storage['url_pattern']:
- self.definitions['requests_per_url']['lines'].append([elem.description,
- elem.description[12:],
- 'incremental'])
+ dim = [elem.description, elem.description[12:], 'incremental']
+ self.definitions['requests_per_url']['lines'].append(dim)
self.data[elem.description] = 0
self.data['url_pattern_other'] = 0
else:
@@ -455,9 +380,8 @@ class Web(Mixin):
# Add 'requests_per_user_defined' chart if specified in the configuration
if self.storage['user_pattern'] and 'user_defined' in match_dict:
for elem in self.storage['user_pattern']:
- self.definitions['requests_per_user_defined']['lines'].append([elem.description,
- elem.description[13:],
- 'incremental'])
+ dim = [elem.description, elem.description[13:], 'incremental']
+ self.definitions['requests_per_user_defined']['lines'].append(dim)
self.data[elem.description] = 0
self.data['user_pattern_other'] = 0
else:
@@ -473,7 +397,7 @@ class Web(Mixin):
if not raw_data:
return None if raw_data is None else self.data
- filtered_data = self.filter_data(raw_data=raw_data)
+ filtered_data = filter_data(raw_data=raw_data, pre_filter=self.pre_filter)
unique_current = set()
timings = defaultdict(lambda: dict(minimum=None, maximum=0, summary=0, count=0))
@@ -488,39 +412,24 @@ class Web(Mixin):
except KeyError:
self.data['0xx'] += 1
# detailed response code
- if self.conf.get('detailed_response_codes', True):
+ if self.configuration.get('detailed_response_codes', True):
self.get_data_per_response_codes_detailed(code=match_dict['code'])
# response statuses
self.get_data_per_statuses(code=match_dict['code'])
- # requests per url
- if self.storage['url_pattern']:
- self.get_data_per_pattern(row=match_dict['url'],
- other='url_pattern_other',
- pattern=self.storage['url_pattern'])
# requests per user defined pattern
if self.storage['user_pattern'] and 'user_defined' in match_dict:
self.get_data_per_pattern(row=match_dict['user_defined'],
other='user_pattern_other',
pattern=self.storage['user_pattern'])
- # requests per http method
- if match_dict['method'] not in self.data:
- self.add_new_dimension(dimension_id=match_dict['method'],
- chart_key='http_method')
- self.data[match_dict['method']] += 1
- # requests per http version
- if 'http_version' in match_dict:
- dim_id = match_dict['http_version'].replace('.', '_')
- if dim_id not in self.data:
- self.add_new_dimension(dimension_id=dim_id,
- chart_key='http_version',
- dimension=match_dict['http_version'])
- self.data[dim_id] += 1
+ # method, url, http version
+ self.get_data_from_request_field(match_dict=match_dict)
# bandwidth sent
bytes_sent = match_dict['bytes_sent'] if '-' not in match_dict['bytes_sent'] else 0
self.data['bytes_sent'] += int(bytes_sent)
# request processing time and bandwidth received
if 'resp_length' in match_dict:
- self.data['resp_length'] += int(match_dict['resp_length'])
+ resp_length = match_dict['resp_length'] if '-' not in match_dict['resp_length'] else 0
+ self.data['resp_length'] += int(resp_length)
if 'resp_time' in match_dict:
get_timings(timings=timings['resp_time'],
time=self.storage['func_resp_time'](float(match_dict['resp_time'])))
@@ -531,7 +440,7 @@ class Web(Mixin):
proto = 'ipv6' if ':' in match_dict['address'] else 'ipv4'
self.data['req_' + proto] += 1
# unique clients ips
- if self.conf.get('all_time', True):
+ if self.configuration.get('all_time', True):
if address_not_in_pool(pool=self.storage['unique_all_time'],
address=match_dict['address'],
pool_size=self.data['unique_tot_ipv4'] + self.data['unique_tot_ipv6']):
@@ -562,45 +471,35 @@ class Web(Mixin):
# REGEX: 1.IPv4 address 2.HTTP method 3. URL 4. Response code
# 5. Bytes sent 6. Response length 7. Response process time
default = re.compile(r'(?P<address>[\da-f.:]+|localhost)'
- r' -.*?"(?P<method>[A-Z]+)'
- r' (?P<url>[^ ]+)'
- r' [A-Z]+/(?P<http_version>\d\.\d)"'
+ r' -.*?"(?P<request>[^"]*)"'
r' (?P<code>[1-9]\d{2})'
r' (?P<bytes_sent>\d+|-)')
apache_ext_insert = re.compile(r'(?P<address>[\da-f.:]+|localhost)'
- r' -.*?"(?P<method>[A-Z]+)'
- r' (?P<url>[^ ]+)'
- r' [A-Z]+/(?P<http_version>\d\.\d)"'
+ r' -.*?"(?P<request>[^"]*)"'
r' (?P<code>[1-9]\d{2})'
r' (?P<bytes_sent>\d+|-)'
- r' (?P<resp_length>\d+)'
+ r' (?P<resp_length>\d+|-)'
r' (?P<resp_time>\d+) ')
apache_ext_append = re.compile(r'(?P<address>[\da-f.:]+|localhost)'
- r' -.*?"(?P<method>[A-Z]+)'
- r' (?P<url>[^ ]+)'
- r' [A-Z]+/(?P<http_version>\d\.\d)"'
+ r' -.*?"(?P<request>[^"]*)"'
r' (?P<code>[1-9]\d{2})'
r' (?P<bytes_sent>\d+|-)'
r' .*?'
- r' (?P<resp_length>\d+)'
+ r' (?P<resp_length>\d+|-)'
r' (?P<resp_time>\d+)'
r'(?: |$)')
nginx_ext_insert = re.compile(r'(?P<address>[\da-f.:]+)'
- r' -.*?"(?P<method>[A-Z]+)'
- r' (?P<url>[^ ]+)'
- r' [A-Z]+/(?P<http_version>\d\.\d)"'
+ r' -.*?"(?P<request>[^"]*)"'
r' (?P<code>[1-9]\d{2})'
r' (?P<bytes_sent>\d+)'
r' (?P<resp_length>\d+)'
r' (?P<resp_time>\d+\.\d+) ')
nginx_ext2_insert = re.compile(r'(?P<address>[\da-f.:]+)'
- r' -.*?"(?P<method>[A-Z]+)'
- r' (?P<url>[^ ]+)'
- r' [A-Z]+/(?P<http_version>\d\.\d)"'
+ r' -.*?"(?P<request>[^"]*)"'
r' (?P<code>[1-9]\d{2})'
r' (?P<bytes_sent>\d+)'
r' (?P<resp_length>\d+)'
@@ -608,9 +507,7 @@ class Web(Mixin):
r' (?P<resp_time_upstream>[\d.-]+) ')
nginx_ext_append = re.compile(r'(?P<address>[\da-f.:]+)'
- r' -.*?"(?P<method>[A-Z]+)'
- r' (?P<url>[^ ]+)'
- r' [A-Z]+/(?P<http_version>\d\.\d)"'
+ r' -.*?"(?P<request>[^"]*)"'
r' (?P<code>[1-9]\d{2})'
r' (?P<bytes_sent>\d+)'
r' .*?'
@@ -669,14 +566,14 @@ class Web(Mixin):
("resp_length" is integer or "-", "resp_time" is integer or float)
"""
- if not hasattr(self.conf.get('custom_log_format'), 'keys'):
+ if not hasattr(self.configuration.get('custom_log_format'), 'keys'):
return find_regex_return(msg='Custom log: "custom_log_format" is not a <dict>')
- pattern = self.conf.get('custom_log_format', dict()).get('pattern')
+ pattern = self.configuration.get('custom_log_format', dict()).get('pattern')
if not (pattern and isinstance(pattern, str)):
return find_regex_return(msg='Custom log: "pattern" option is not specified or type is not <str>')
- resp_time_func = self.conf.get('custom_log_format', dict()).get('time_multiplier') or 0
+ resp_time_func = self.configuration.get('custom_log_format', dict()).get('time_multiplier') or 0
if not isinstance(resp_time_func, int):
return find_regex_return(msg='Custom log: "time_multiplier" is not an integer')
@@ -685,6 +582,7 @@ class Web(Mixin):
regex = re.compile(pattern)
except re.error as error:
return find_regex_return(msg='Pattern compile error: %s' % str(error))
+
match = regex.search(last_line)
if not match:
return find_regex_return(msg='Custom log: pattern search FAILED')
@@ -693,14 +591,14 @@ class Web(Mixin):
if match_dict is None:
return find_regex_return(msg='Custom log: search OK but contains no named subgroups'
' (you need to use ?P<subgroup_name>)')
- mandatory_dict = {'address': r'[\da-f.:]+|localhost',
+ mandatory_dict = {'address': r'[\w.:-]+',
'code': r'[1-9]\d{2}',
- 'method': r'[A-Z]+',
'bytes_sent': r'\d+|-'}
- optional_dict = {'resp_length': r'\d+',
+ optional_dict = {'resp_length': r'\d+|-',
'resp_time': r'[\d.]+',
'resp_time_upstream': r'[\d.-]+',
- 'http_version': r'\d(\.\d)?'}
+ 'method': r'[A-Z]+',
+ 'http_version': r'\d(?:\.\d)?'}
mandatory_values = set(mandatory_dict) - set(match_dict)
if mandatory_values:
@@ -726,6 +624,36 @@ class Web(Mixin):
self.storage['regex'] = regex
return find_regex_return(match_dict=match_dict)
+ def get_data_from_request_field(self, match_dict):
+ if match_dict.get('request'):
+ match_dict = REQUEST_REGEX.search(match_dict['request'])
+ if match_dict:
+ match_dict = match_dict.groupdict()
+ else:
+ return
+ # requests per url
+ if match_dict.get('url') and self.storage['url_pattern']:
+ self.get_data_per_pattern(row=match_dict['url'],
+ other='url_pattern_other',
+ pattern=self.storage['url_pattern'])
+ # requests per http method
+ if match_dict.get('method'):
+ if match_dict['method'] not in self.data:
+ self.charts['http_method'].add_dimension([match_dict['method'],
+ match_dict['method'],
+ 'incremental'])
+ self.data[match_dict['method']] = 0
+ self.data[match_dict['method']] += 1
+ # requests per http version
+ if match_dict.get('http_version'):
+ dim_id = match_dict['http_version'].replace('.', '_')
+ if dim_id not in self.data:
+ self.charts['http_version'].add_dimension([dim_id,
+ match_dict['http_version'],
+ 'incremental'])
+ self.data[dim_id] = 0
+ self.data[dim_id] += 1
+
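REQUEST_REGEX (defined near the top of this diff) splits the quoted request field back into its parts, which lets the log-format regexes capture the whole field even when it is malformed. A quick check against a typical value:

    m = REQUEST_REGEX.search('GET /index.html HTTP/1.1')
    print(m.groupdict())
    # -> {'method': 'GET', 'url': '/index.html', 'http_version': '1.1'}
    # a garbage request field simply yields no match:
    print(REQUEST_REGEX.search('not a request'))  # None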
def get_data_per_response_codes_detailed(self, code):
"""
:param code: str: CODE from parsed line. Ex.: '202', '499'
@@ -733,14 +661,14 @@ class Web(Mixin):
Calls add_new_dimension method If the value is found for the first time
"""
if code not in self.data:
- if self.conf.get('detailed_response_aggregate', True):
- self.add_new_dimension(dimension_id=code,
- chart_key='detailed_response_codes')
+ if self.configuration.get('detailed_response_aggregate', True):
+ self.charts['detailed_response_codes'].add_dimension([code, code, 'incremental'])
+ self.data[code] = 0
else:
code_index = int(code[0]) if int(code[0]) < 6 else 6
chart_key = 'detailed_response_codes' + DET_RESP_AGGR[code_index]
- self.add_new_dimension(dimension_id=code,
- chart_key=chart_key)
+ self.charts[chart_key].add_dimension([code, code, 'incremental'])
+ self.data[code] = 0
self.data[code] += 1
def get_data_per_pattern(self, row, other, pattern):
@@ -780,8 +708,8 @@ class Web(Mixin):
class ApacheCache:
- def __init__(self, configuration):
- self.conf = configuration
+ def __init__(self, service):
+ self.service = service
self.order = ORDER_APACHE_CACHE
self.definitions = CHARTS_APACHE_CACHE
@@ -805,12 +733,12 @@ class ApacheCache:
return data
-class Squid(Mixin):
- def __init__(self, configuration):
- self.conf = configuration
+class Squid:
+ def __init__(self, service):
+ self.service = service
self.order = ORDER_SQUID
self.definitions = CHARTS_SQUID
- self.pre_filter = check_patterns('filter', self.conf.get('filter'))
+ self.pre_filter = check_patterns('filter', self.configuration.get('filter'))
self.storage = dict()
self.data = {'duration_max': 0, 'duration_avg': 0, 'duration_min': 0, 'bytes': 0,
'0xx': 0, '1xx': 0, '2xx': 0, '3xx': 0, '4xx': 0, '5xx': 0,
@@ -819,8 +747,11 @@ class Squid(Mixin):
'redirects': 0, 'bad_requests': 0, 'server_errors': 0, 'other_requests': 0
}
+ def __getattr__(self, item):
+ return getattr(self.service, item)
+
def check(self):
- last_line = self.get_last_line()
+ last_line = read_last_line(self.log_path)
if not last_line:
return False
self.storage['unique_all_time'] = list()
@@ -856,7 +787,7 @@ class Squid(Mixin):
'chart': 'squid_mime_type',
'func_dim_id': lambda v: v.split('/')[0],
'func_dim': None}}
- if not self.conf.get('all_time', True):
+ if not self.configuration.get('all_time', True):
self.order.remove('squid_clients_all')
return True
@@ -864,7 +795,7 @@ class Squid(Mixin):
if not raw_data:
return None if raw_data is None else self.data
- filtered_data = self.filter_data(raw_data=raw_data)
+ filtered_data = filter_data(raw_data=raw_data, pre_filter=self.pre_filter)
unique_ip = set()
timings = defaultdict(lambda: dict(minimum=None, maximum=0, summary=0, count=0))
@@ -888,7 +819,7 @@ class Squid(Mixin):
proto = 'ipv4' if '.' in match['client_address'] else 'ipv6'
# unique clients ips
- if self.conf.get('all_time', True):
+ if self.configuration.get('all_time', True):
if address_not_in_pool(pool=self.storage['unique_all_time'],
address=match['client_address'],
pool_size=self.data['unique_tot_ipv4'] + self.data['unique_tot_ipv6']):
@@ -904,9 +835,10 @@ class Squid(Mixin):
dimension_id = values['func_dim_id'](match[key]) if values['func_dim_id'] else match[key]
if dimension_id not in self.data:
dimension = values['func_dim'](match[key]) if values['func_dim'] else dimension_id
- self.add_new_dimension(dimension_id=dimension_id,
- chart_key=values['chart'],
- dimension=dimension)
+ self.charts[values['chart']].add_dimension([dimension_id,
+ dimension,
+ 'incremental'])
+ self.data[dimension_id] = 0
self.data[dimension_id] += 1
else:
self.data['unmatched'] += 1
@@ -940,8 +872,8 @@ class Squid(Mixin):
:return:
"""
if code not in self.data:
- self.add_new_dimension(dimension_id=code,
- chart_key='squid_code')
+ self.charts['squid_code'].add_dimension([code, code, 'incremental'])
+ self.data[code] = 0
self.data[code] += 1
for tag in code.split('_'):
@@ -951,9 +883,8 @@ class Squid(Mixin):
continue
dimension_id = '_'.join(['code_detailed', tag])
if dimension_id not in self.data:
- self.add_new_dimension(dimension_id=dimension_id,
- dimension=tag,
- chart_key=chart_key)
+ self.charts[chart_key].add_dimension([dimension_id, tag, 'incremental'])
+ self.data[dimension_id] = 0
self.data[dimension_id] += 1
@@ -1038,14 +969,19 @@ def check_patterns(string, dimension_regex_dict):
return result or None
-def find_job_name(override_name, name):
+def filter_data(raw_data, pre_filter):
"""
- :param override_name: str: 'name' var from configuration file
- :param name: str: 'job_name' from configuration file
- :return: str: new job name
- We need this for dynamic charts. Actually the same logic as in python.d.plugin.
+ :param raw_data:
+ :param pre_filter:
+ :return:
"""
- add_to_name = override_name or name
- if add_to_name:
- return '_'.join(['web_log', re.sub('\s+', '_', add_to_name)])
- return 'web_log'
+
+ if not pre_filter:
+ return raw_data
+ filtered = raw_data
+ for elem in pre_filter:
+ if elem.description == 'filter_include':
+ filtered = filter(elem.func, filtered)
+ elif elem.description == 'filter_exclude':
+ filtered = filterfalse(elem.func, filtered)
+ return filtered
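A hedged usage sketch for filter_data(), pairing it with stand-ins for the objects check_patterns() produces (a description plus a match function); it assumes only what the loop above reads, namely .description and .func, and relies on the filter/filterfalse imports at the top of the module:

    from collections import namedtuple

    Filter = namedtuple('Filter', ['description', 'func'])

    lines = ['GET /api/v1 200', 'GET /static/a.css 200', 'GET /api/v2 500']
    pre_filter = [
        Filter('filter_include', lambda line: '/api/' in line),
        Filter('filter_exclude', lambda line: ' 500' in line),
    ]
    print(list(filter_data(lines, pre_filter)))  # ['GET /api/v1 200']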