path: root/collectors/python.d.plugin
author     Daniel Baumann <daniel.baumann@progress-linux.org>  2019-09-13 05:05:25 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2019-09-13 05:05:25 +0000
commit     d72015c7962af72903326a01fb114f8f2d37eebc (patch)
tree       5bb5ae6928f3f2a92f478f69c2f5c9aa9333fbf8 /collectors/python.d.plugin
parent     Releasing debian version 1.17.0-3. (diff)
download   netdata-d72015c7962af72903326a01fb114f8f2d37eebc.tar.xz
           netdata-d72015c7962af72903326a01fb114f8f2d37eebc.zip
Merging upstream version 1.17.1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'collectors/python.d.plugin')
-rw-r--r--  collectors/python.d.plugin/Makefile.in         | 2063
-rw-r--r--  collectors/python.d.plugin/python.d.plugin     |  733
-rw-r--r--  collectors/python.d.plugin/python.d.plugin.in  | 1018
3 files changed, 527 insertions, 3287 deletions
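
The Makefile.in removed below is generated output rather than source: its own header states it was produced by automake 1.15.1 from Makefile.am, and python.d.plugin is in turn generated from python.d.plugin.in (it appears under CLEANFILES, while python.d.plugin.in stays in dist_noinst_DATA). A minimal sketch of how such files are regenerated from a source checkout follows; the commands are the standard autotools workflow and are not part of this diff:

    # rebuild configure and every Makefile.in from configure.ac / Makefile.am
    autoreconf -ivf
    # configure instantiates collectors/python.d.plugin/Makefile from Makefile.in,
    # and make then produces python.d.plugin from python.d.plugin.in
    ./configure
    make
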
diff --git a/collectors/python.d.plugin/Makefile.in b/collectors/python.d.plugin/Makefile.in
deleted file mode 100644
index d39e4a052..000000000
--- a/collectors/python.d.plugin/Makefile.in
+++ /dev/null
@@ -1,2063 +0,0 @@
-# Makefile.in generated by automake 1.15.1 from Makefile.am.
-# @configure_input@
-
-# Copyright (C) 1994-2017 Free Software Foundation, Inc.
-
-# This Makefile.in is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
-# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
-# PARTICULAR PURPOSE.
-
-@SET_MAKE@
-
-# SPDX-License-Identifier: GPL-3.0-or-later
-
-# SPDX-License-Identifier: GPL-3.0-or-later
-
-# THIS IS NOT A COMPLETE Makefile
-# IT IS INCLUDED BY ITS PARENT'S Makefile.am
-# IT IS REQUIRED TO REFERENCE ALL FILES RELATIVE TO THE PARENT
-
-
-VPATH = @srcdir@
-am__is_gnu_make = { \
- if test -z '$(MAKELEVEL)'; then \
- false; \
- elif test -n '$(MAKE_HOST)'; then \
- true; \
- elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
- true; \
- else \
- false; \
- fi; \
-}
-am__make_running_with_option = \
- case $${target_option-} in \
- ?) ;; \
- *) echo "am__make_running_with_option: internal error: invalid" \
- "target option '$${target_option-}' specified" >&2; \
- exit 1;; \
- esac; \
- has_opt=no; \
- sane_makeflags=$$MAKEFLAGS; \
- if $(am__is_gnu_make); then \
- sane_makeflags=$$MFLAGS; \
- else \
- case $$MAKEFLAGS in \
- *\\[\ \ ]*) \
- bs=\\; \
- sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
- | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
- esac; \
- fi; \
- skip_next=no; \
- strip_trailopt () \
- { \
- flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
- }; \
- for flg in $$sane_makeflags; do \
- test $$skip_next = yes && { skip_next=no; continue; }; \
- case $$flg in \
- *=*|--*) continue;; \
- -*I) strip_trailopt 'I'; skip_next=yes;; \
- -*I?*) strip_trailopt 'I';; \
- -*O) strip_trailopt 'O'; skip_next=yes;; \
- -*O?*) strip_trailopt 'O';; \
- -*l) strip_trailopt 'l'; skip_next=yes;; \
- -*l?*) strip_trailopt 'l';; \
- -[dEDm]) skip_next=yes;; \
- -[JT]) skip_next=yes;; \
- esac; \
- case $$flg in \
- *$$target_option*) has_opt=yes; break;; \
- esac; \
- done; \
- test $$has_opt = yes
-am__make_dryrun = (target_option=n; $(am__make_running_with_option))
-am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
-pkgdatadir = $(datadir)/@PACKAGE@
-pkgincludedir = $(includedir)/@PACKAGE@
-pkglibdir = $(libdir)/@PACKAGE@
-pkglibexecdir = $(libexecdir)/@PACKAGE@
-am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
-install_sh_DATA = $(install_sh) -c -m 644
-install_sh_PROGRAM = $(install_sh) -c
-install_sh_SCRIPT = $(install_sh) -c
-INSTALL_HEADER = $(INSTALL_DATA)
-transform = $(program_transform_name)
-NORMAL_INSTALL = :
-PRE_INSTALL = :
-POST_INSTALL = :
-NORMAL_UNINSTALL = :
-PRE_UNINSTALL = :
-POST_UNINSTALL = :
-build_triplet = @build@
-host_triplet = @host@
-subdir = collectors/python.d.plugin
-ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
-am__aclocal_m4_deps = $(top_srcdir)/build/m4/ax_c___atomic.m4 \
- $(top_srcdir)/build/m4/ax_c__generic.m4 \
- $(top_srcdir)/build/m4/ax_c_lto.m4 \
- $(top_srcdir)/build/m4/ax_c_mallinfo.m4 \
- $(top_srcdir)/build/m4/ax_c_mallopt.m4 \
- $(top_srcdir)/build/m4/ax_check_compile_flag.m4 \
- $(top_srcdir)/build/m4/ax_gcc_func_attribute.m4 \
- $(top_srcdir)/build/m4/ax_pthread.m4 \
- $(top_srcdir)/build/m4/jemalloc.m4 \
- $(top_srcdir)/build/m4/tcmalloc.m4 $(top_srcdir)/configure.ac
-am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
- $(ACLOCAL_M4)
-DIST_COMMON = $(srcdir)/Makefile.am $(dist_plugins_SCRIPTS) \
- $(dist_python_SCRIPTS) $(dist_bases_DATA) \
- $(dist_bases_framework_services_DATA) $(dist_libconfig_DATA) \
- $(dist_noinst_DATA) $(dist_python_DATA) \
- $(dist_python_urllib3_DATA) \
- $(dist_python_urllib3_backports_DATA) \
- $(dist_python_urllib3_contrib_DATA) \
- $(dist_python_urllib3_packages_DATA) \
- $(dist_python_urllib3_securetransport_DATA) \
- $(dist_python_urllib3_ssl_match_hostname_DATA) \
- $(dist_python_urllib3_util_DATA) $(dist_pythonconfig_DATA) \
- $(dist_pythonmodules_DATA) $(dist_pythonyaml2_DATA) \
- $(dist_pythonyaml3_DATA) $(dist_third_party_DATA) \
- $(dist_userpythonconfig_DATA) $(am__DIST_COMMON)
-mkinstalldirs = $(install_sh) -d
-CONFIG_HEADER = $(top_builddir)/config.h
-CONFIG_CLEAN_FILES =
-CONFIG_CLEAN_VPATH_FILES =
-am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
-am__vpath_adj = case $$p in \
- $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
- *) f=$$p;; \
- esac;
-am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
-am__install_max = 40
-am__nobase_strip_setup = \
- srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
-am__nobase_strip = \
- for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
-am__nobase_list = $(am__nobase_strip_setup); \
- for p in $$list; do echo "$$p $$p"; done | \
- sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
- $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
- if (++n[$$2] == $(am__install_max)) \
- { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
- END { for (dir in files) print dir, files[dir] }'
-am__base_list = \
- sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
- sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
-am__uninstall_files_from_dir = { \
- test -z "$$files" \
- || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
- || { echo " ( cd '$$dir' && rm -f" $$files ")"; \
- $(am__cd) "$$dir" && rm -f $$files; }; \
- }
-am__installdirs = "$(DESTDIR)$(pluginsdir)" "$(DESTDIR)$(pythondir)" \
- "$(DESTDIR)$(basesdir)" \
- "$(DESTDIR)$(bases_framework_servicesdir)" \
- "$(DESTDIR)$(libconfigdir)" "$(DESTDIR)$(pythondir)" \
- "$(DESTDIR)$(python_urllib3dir)" \
- "$(DESTDIR)$(python_urllib3_backportsdir)" \
- "$(DESTDIR)$(python_urllib3_contribdir)" \
- "$(DESTDIR)$(python_urllib3_packagesdir)" \
- "$(DESTDIR)$(python_urllib3_securetransportdir)" \
- "$(DESTDIR)$(python_urllib3_ssl_match_hostnamedir)" \
- "$(DESTDIR)$(python_urllib3_utildir)" \
- "$(DESTDIR)$(pythonconfigdir)" "$(DESTDIR)$(pythonmodulesdir)" \
- "$(DESTDIR)$(pythonyaml2dir)" "$(DESTDIR)$(pythonyaml3dir)" \
- "$(DESTDIR)$(third_partydir)" \
- "$(DESTDIR)$(userpythonconfigdir)"
-SCRIPTS = $(dist_plugins_SCRIPTS) $(dist_python_SCRIPTS)
-AM_V_P = $(am__v_P_@AM_V@)
-am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
-am__v_P_0 = false
-am__v_P_1 = :
-AM_V_GEN = $(am__v_GEN_@AM_V@)
-am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
-am__v_GEN_0 = @echo " GEN " $@;
-am__v_GEN_1 =
-AM_V_at = $(am__v_at_@AM_V@)
-am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
-am__v_at_0 = @
-am__v_at_1 =
-SOURCES =
-DIST_SOURCES =
-am__can_run_installinfo = \
- case $$AM_UPDATE_INFO_DIR in \
- n|no|NO) false;; \
- *) (install-info --version) >/dev/null 2>&1;; \
- esac
-DATA = $(dist_bases_DATA) $(dist_bases_framework_services_DATA) \
- $(dist_libconfig_DATA) $(dist_noinst_DATA) $(dist_python_DATA) \
- $(dist_python_urllib3_DATA) \
- $(dist_python_urllib3_backports_DATA) \
- $(dist_python_urllib3_contrib_DATA) \
- $(dist_python_urllib3_packages_DATA) \
- $(dist_python_urllib3_securetransport_DATA) \
- $(dist_python_urllib3_ssl_match_hostname_DATA) \
- $(dist_python_urllib3_util_DATA) $(dist_pythonconfig_DATA) \
- $(dist_pythonmodules_DATA) $(dist_pythonyaml2_DATA) \
- $(dist_pythonyaml3_DATA) $(dist_third_party_DATA) \
- $(dist_userpythonconfig_DATA)
-am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
-am__DIST_COMMON = $(srcdir)/Makefile.in \
- $(srcdir)/adaptec_raid/Makefile.inc \
- $(srcdir)/apache/Makefile.inc $(srcdir)/beanstalk/Makefile.inc \
- $(srcdir)/bind_rndc/Makefile.inc $(srcdir)/boinc/Makefile.inc \
- $(srcdir)/ceph/Makefile.inc $(srcdir)/chrony/Makefile.inc \
- $(srcdir)/couchdb/Makefile.inc \
- $(srcdir)/dns_query_time/Makefile.inc \
- $(srcdir)/dnsdist/Makefile.inc $(srcdir)/dockerd/Makefile.inc \
- $(srcdir)/dovecot/Makefile.inc \
- $(srcdir)/elasticsearch/Makefile.inc \
- $(srcdir)/energid/Makefile.inc $(srcdir)/example/Makefile.inc \
- $(srcdir)/exim/Makefile.inc $(srcdir)/fail2ban/Makefile.inc \
- $(srcdir)/freeradius/Makefile.inc \
- $(srcdir)/go_expvar/Makefile.inc \
- $(srcdir)/haproxy/Makefile.inc $(srcdir)/hddtemp/Makefile.inc \
- $(srcdir)/httpcheck/Makefile.inc \
- $(srcdir)/icecast/Makefile.inc $(srcdir)/ipfs/Makefile.inc \
- $(srcdir)/isc_dhcpd/Makefile.inc \
- $(srcdir)/litespeed/Makefile.inc $(srcdir)/logind/Makefile.inc \
- $(srcdir)/megacli/Makefile.inc \
- $(srcdir)/memcached/Makefile.inc \
- $(srcdir)/mongodb/Makefile.inc $(srcdir)/monit/Makefile.inc \
- $(srcdir)/mysql/Makefile.inc $(srcdir)/nginx/Makefile.inc \
- $(srcdir)/nginx_plus/Makefile.inc $(srcdir)/nsd/Makefile.inc \
- $(srcdir)/ntpd/Makefile.inc $(srcdir)/nvidia_smi/Makefile.inc \
- $(srcdir)/openldap/Makefile.inc \
- $(srcdir)/oracledb/Makefile.inc \
- $(srcdir)/ovpn_status_log/Makefile.inc \
- $(srcdir)/phpfpm/Makefile.inc $(srcdir)/portcheck/Makefile.inc \
- $(srcdir)/postfix/Makefile.inc $(srcdir)/postgres/Makefile.inc \
- $(srcdir)/powerdns/Makefile.inc \
- $(srcdir)/proxysql/Makefile.inc $(srcdir)/puppet/Makefile.inc \
- $(srcdir)/rabbitmq/Makefile.inc $(srcdir)/redis/Makefile.inc \
- $(srcdir)/rethinkdbs/Makefile.inc \
- $(srcdir)/retroshare/Makefile.inc \
- $(srcdir)/riakkv/Makefile.inc $(srcdir)/samba/Makefile.inc \
- $(srcdir)/sensors/Makefile.inc \
- $(srcdir)/smartd_log/Makefile.inc \
- $(srcdir)/spigotmc/Makefile.inc \
- $(srcdir)/springboot/Makefile.inc $(srcdir)/squid/Makefile.inc \
- $(srcdir)/tomcat/Makefile.inc $(srcdir)/tor/Makefile.inc \
- $(srcdir)/traefik/Makefile.inc $(srcdir)/unbound/Makefile.inc \
- $(srcdir)/uwsgi/Makefile.inc $(srcdir)/varnish/Makefile.inc \
- $(srcdir)/w1sensor/Makefile.inc $(srcdir)/web_log/Makefile.inc \
- $(top_srcdir)/build/subst.inc
-DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
-ACLOCAL = @ACLOCAL@
-AMTAR = @AMTAR@
-AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
-AUTOCONF = @AUTOCONF@
-AUTOHEADER = @AUTOHEADER@
-AUTOMAKE = @AUTOMAKE@
-AWK = @AWK@
-CC = @CC@
-CCDEPMODE = @CCDEPMODE@
-CFLAGS = @CFLAGS@
-CPP = @CPP@
-CPPFLAGS = @CPPFLAGS@
-CUPSCONFIG = @CUPSCONFIG@
-CXX = @CXX@
-CXXDEPMODE = @CXXDEPMODE@
-CXXFLAGS = @CXXFLAGS@
-CXX_BINARY = @CXX_BINARY@
-CYGPATH_W = @CYGPATH_W@
-DEFS = @DEFS@
-DEPDIR = @DEPDIR@
-ECHO_C = @ECHO_C@
-ECHO_N = @ECHO_N@
-ECHO_T = @ECHO_T@
-EGREP = @EGREP@
-EXEEXT = @EXEEXT@
-GREP = @GREP@
-INSTALL = @INSTALL@
-INSTALL_DATA = @INSTALL_DATA@
-INSTALL_PROGRAM = @INSTALL_PROGRAM@
-INSTALL_SCRIPT = @INSTALL_SCRIPT@
-INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
-IPMIMONITORING_CFLAGS = @IPMIMONITORING_CFLAGS@
-IPMIMONITORING_LIBS = @IPMIMONITORING_LIBS@
-JSON_CFLAGS = @JSON_CFLAGS@
-JSON_LIBS = @JSON_LIBS@
-LDFLAGS = @LDFLAGS@
-LIBCAP_CFLAGS = @LIBCAP_CFLAGS@
-LIBCAP_LIBS = @LIBCAP_LIBS@
-LIBCRYPTO_CFLAGS = @LIBCRYPTO_CFLAGS@
-LIBCRYPTO_LIBS = @LIBCRYPTO_LIBS@
-LIBCURL_CFLAGS = @LIBCURL_CFLAGS@
-LIBCURL_LIBS = @LIBCURL_LIBS@
-LIBMNL_CFLAGS = @LIBMNL_CFLAGS@
-LIBMNL_LIBS = @LIBMNL_LIBS@
-LIBMONGOC_CFLAGS = @LIBMONGOC_CFLAGS@
-LIBMONGOC_LIBS = @LIBMONGOC_LIBS@
-LIBOBJS = @LIBOBJS@
-LIBS = @LIBS@
-LIBSSL_CFLAGS = @LIBSSL_CFLAGS@
-LIBSSL_LIBS = @LIBSSL_LIBS@
-LTLIBOBJS = @LTLIBOBJS@
-MAINT = @MAINT@
-MAKEINFO = @MAKEINFO@
-MATH_CFLAGS = @MATH_CFLAGS@
-MATH_LIBS = @MATH_LIBS@
-MKDIR_P = @MKDIR_P@
-NFACCT_CFLAGS = @NFACCT_CFLAGS@
-NFACCT_LIBS = @NFACCT_LIBS@
-OBJEXT = @OBJEXT@
-OPTIONAL_CUPS_CFLAGS = @OPTIONAL_CUPS_CFLAGS@
-OPTIONAL_CUPS_LIBS = @OPTIONAL_CUPS_LIBS@
-OPTIONAL_IPMIMONITORING_CFLAGS = @OPTIONAL_IPMIMONITORING_CFLAGS@
-OPTIONAL_IPMIMONITORING_LIBS = @OPTIONAL_IPMIMONITORING_LIBS@
-OPTIONAL_JSONC_LIBS = @OPTIONAL_JSONC_LIBS@
-OPTIONAL_JUDY_LIBS = @OPTIONAL_JUDY_LIBS@
-OPTIONAL_KINESIS_CFLAGS = @OPTIONAL_KINESIS_CFLAGS@
-OPTIONAL_KINESIS_LIBS = @OPTIONAL_KINESIS_LIBS@
-OPTIONAL_LIBCAP_CFLAGS = @OPTIONAL_LIBCAP_CFLAGS@
-OPTIONAL_LIBCAP_LIBS = @OPTIONAL_LIBCAP_LIBS@
-OPTIONAL_LZ4_LIBS = @OPTIONAL_LZ4_LIBS@
-OPTIONAL_MATH_CFLAGS = @OPTIONAL_MATH_CFLAGS@
-OPTIONAL_MATH_LIBS = @OPTIONAL_MATH_LIBS@
-OPTIONAL_MONGOC_CFLAGS = @OPTIONAL_MONGOC_CFLAGS@
-OPTIONAL_MONGOC_LIBS = @OPTIONAL_MONGOC_LIBS@
-OPTIONAL_NFACCT_CFLAGS = @OPTIONAL_NFACCT_CFLAGS@
-OPTIONAL_NFACCT_LIBS = @OPTIONAL_NFACCT_LIBS@
-OPTIONAL_PROMETHEUS_REMOTE_WRITE_CFLAGS = @OPTIONAL_PROMETHEUS_REMOTE_WRITE_CFLAGS@
-OPTIONAL_PROMETHEUS_REMOTE_WRITE_LIBS = @OPTIONAL_PROMETHEUS_REMOTE_WRITE_LIBS@
-OPTIONAL_SSL_LIBS = @OPTIONAL_SSL_LIBS@
-OPTIONAL_UUID_CFLAGS = @OPTIONAL_UUID_CFLAGS@
-OPTIONAL_UUID_LIBS = @OPTIONAL_UUID_LIBS@
-OPTIONAL_UV_LIBS = @OPTIONAL_UV_LIBS@
-OPTIONAL_XENSTAT_CFLAGS = @OPTIONAL_XENSTAT_CFLAGS@
-OPTIONAL_XENSTAT_LIBS = @OPTIONAL_XENSTAT_LIBS@
-OPTIONAL_ZLIB_CFLAGS = @OPTIONAL_ZLIB_CFLAGS@
-OPTIONAL_ZLIB_LIBS = @OPTIONAL_ZLIB_LIBS@
-PACKAGE = @PACKAGE@
-PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
-PACKAGE_NAME = @PACKAGE_NAME@
-PACKAGE_RPM_VERSION = @PACKAGE_RPM_VERSION@
-PACKAGE_STRING = @PACKAGE_STRING@
-PACKAGE_TARNAME = @PACKAGE_TARNAME@
-PACKAGE_URL = @PACKAGE_URL@
-PACKAGE_VERSION = @PACKAGE_VERSION@
-PATH_SEPARATOR = @PATH_SEPARATOR@
-PKG_CONFIG = @PKG_CONFIG@
-PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@
-PKG_CONFIG_PATH = @PKG_CONFIG_PATH@
-PROTOBUF_CFLAGS = @PROTOBUF_CFLAGS@
-PROTOBUF_LIBS = @PROTOBUF_LIBS@
-PROTOC = @PROTOC@
-PTHREAD_CC = @PTHREAD_CC@
-PTHREAD_CFLAGS = @PTHREAD_CFLAGS@
-PTHREAD_LIBS = @PTHREAD_LIBS@
-SET_MAKE = @SET_MAKE@
-SHELL = @SHELL@
-SSE_CANDIDATE = @SSE_CANDIDATE@
-STRIP = @STRIP@
-UUID_CFLAGS = @UUID_CFLAGS@
-UUID_LIBS = @UUID_LIBS@
-VERSION = @VERSION@
-XENLIGHT_CFLAGS = @XENLIGHT_CFLAGS@
-XENLIGHT_LIBS = @XENLIGHT_LIBS@
-YAJL_CFLAGS = @YAJL_CFLAGS@
-YAJL_LIBS = @YAJL_LIBS@
-ZLIB_CFLAGS = @ZLIB_CFLAGS@
-ZLIB_LIBS = @ZLIB_LIBS@
-abs_builddir = @abs_builddir@
-abs_srcdir = @abs_srcdir@
-abs_top_builddir = @abs_top_builddir@
-abs_top_srcdir = @abs_top_srcdir@
-ac_ct_CC = @ac_ct_CC@
-ac_ct_CXX = @ac_ct_CXX@
-am__include = @am__include@
-am__leading_dot = @am__leading_dot@
-am__quote = @am__quote@
-am__tar = @am__tar@
-am__untar = @am__untar@
-ax_pthread_config = @ax_pthread_config@
-bindir = @bindir@
-build = @build@
-build_alias = @build_alias@
-build_cpu = @build_cpu@
-build_os = @build_os@
-build_target = @build_target@
-build_vendor = @build_vendor@
-builddir = @builddir@
-cachedir = @cachedir@
-chartsdir = @chartsdir@
-configdir = @configdir@
-datadir = @datadir@
-datarootdir = @datarootdir@
-docdir = @docdir@
-dvidir = @dvidir@
-exec_prefix = @exec_prefix@
-has_jemalloc = @has_jemalloc@
-has_tcmalloc = @has_tcmalloc@
-host = @host@
-host_alias = @host_alias@
-host_cpu = @host_cpu@
-host_os = @host_os@
-host_vendor = @host_vendor@
-htmldir = @htmldir@
-includedir = @includedir@
-infodir = @infodir@
-install_sh = @install_sh@
-libconfigdir = @libconfigdir@
-libdir = @libdir@
-libexecdir = @libexecdir@
-localedir = @localedir@
-localstatedir = @localstatedir@
-logdir = @logdir@
-mandir = @mandir@
-mkdir_p = @mkdir_p@
-nodedir = @nodedir@
-oldincludedir = @oldincludedir@
-pdfdir = @pdfdir@
-pluginsdir = @pluginsdir@
-prefix = @prefix@
-program_transform_name = @program_transform_name@
-psdir = @psdir@
-pythondir = @pythondir@
-registrydir = @registrydir@
-runstatedir = @runstatedir@
-sbindir = @sbindir@
-sharedstatedir = @sharedstatedir@
-srcdir = @srcdir@
-sysconfdir = @sysconfdir@
-target_alias = @target_alias@
-top_build_prefix = @top_build_prefix@
-top_builddir = @top_builddir@
-top_srcdir = @top_srcdir@
-varlibdir = @varlibdir@
-webdir = @webdir@
-MAINTAINERCLEANFILES = $(srcdir)/Makefile.in
-CLEANFILES = \
- python.d.plugin \
- $(NULL)
-
-SUFFIXES = .in
-dist_libconfig_DATA = \
- python.d.conf \
- $(NULL)
-
-dist_plugins_SCRIPTS = \
- python.d.plugin \
- $(NULL)
-
-
-# do not install these files, but include them in the distribution
-dist_noinst_DATA = python.d.plugin.in README.md $(NULL) \
- adaptec_raid/README.md adaptec_raid/Makefile.inc \
- apache/README.md apache/Makefile.inc beanstalk/README.md \
- beanstalk/Makefile.inc bind_rndc/README.md \
- bind_rndc/Makefile.inc boinc/README.md boinc/Makefile.inc \
- ceph/README.md ceph/Makefile.inc chrony/README.md \
- chrony/Makefile.inc couchdb/README.md couchdb/Makefile.inc \
- dnsdist/README.md dnsdist/Makefile.inc \
- dns_query_time/README.md dns_query_time/Makefile.inc \
- dockerd/README.md dockerd/Makefile.inc dovecot/README.md \
- dovecot/Makefile.inc elasticsearch/README.md \
- elasticsearch/Makefile.inc energid/README.md \
- energid/Makefile.inc example/README.md example/Makefile.inc \
- exim/README.md exim/Makefile.inc fail2ban/README.md \
- fail2ban/Makefile.inc freeradius/README.md \
- freeradius/Makefile.inc go_expvar/README.md \
- go_expvar/Makefile.inc haproxy/README.md haproxy/Makefile.inc \
- hddtemp/README.md hddtemp/Makefile.inc httpcheck/README.md \
- httpcheck/Makefile.inc icecast/README.md icecast/Makefile.inc \
- ipfs/README.md ipfs/Makefile.inc isc_dhcpd/README.md \
- isc_dhcpd/Makefile.inc litespeed/README.md \
- litespeed/Makefile.inc logind/README.md logind/Makefile.inc \
- megacli/README.md megacli/Makefile.inc memcached/README.md \
- memcached/Makefile.inc mongodb/README.md mongodb/Makefile.inc \
- monit/README.md monit/Makefile.inc mysql/README.md \
- mysql/Makefile.inc nginx/README.md nginx/Makefile.inc \
- nginx_plus/README.md nginx_plus/Makefile.inc \
- nvidia_smi/README.md nvidia_smi/Makefile.inc nsd/README.md \
- nsd/Makefile.inc ntpd/README.md ntpd/Makefile.inc \
- ovpn_status_log/README.md ovpn_status_log/Makefile.inc \
- openldap/README.md openldap/Makefile.inc oracledb/README.md \
- oracledb/Makefile.inc phpfpm/README.md phpfpm/Makefile.inc \
- portcheck/README.md portcheck/Makefile.inc postfix/README.md \
- postfix/Makefile.inc postgres/README.md postgres/Makefile.inc \
- powerdns/README.md powerdns/Makefile.inc proxysql/README.md \
- proxysql/Makefile.inc puppet/README.md puppet/Makefile.inc \
- rabbitmq/README.md rabbitmq/Makefile.inc redis/README.md \
- redis/Makefile.inc rethinkdbs/README.md \
- rethinkdbs/Makefile.inc retroshare/README.md \
- retroshare/Makefile.inc riakkv/README.md riakkv/Makefile.inc \
- samba/README.md samba/Makefile.inc sensors/README.md \
- sensors/Makefile.inc smartd_log/README.md \
- smartd_log/Makefile.inc spigotmc/README.md \
- spigotmc/Makefile.inc springboot/README.md \
- springboot/Makefile.inc squid/README.md squid/Makefile.inc \
- tomcat/README.md tomcat/Makefile.inc tor/README.md \
- tor/Makefile.inc traefik/README.md traefik/Makefile.inc \
- unbound/README.md unbound/Makefile.inc uwsgi/README.md \
- uwsgi/Makefile.inc varnish/README.md varnish/Makefile.inc \
- w1sensor/README.md w1sensor/Makefile.inc web_log/README.md \
- web_log/Makefile.inc
-dist_python_SCRIPTS = \
- $(NULL)
-
-
-# install these files
-dist_python_DATA = $(NULL) adaptec_raid/adaptec_raid.chart.py \
- apache/apache.chart.py beanstalk/beanstalk.chart.py \
- bind_rndc/bind_rndc.chart.py boinc/boinc.chart.py \
- ceph/ceph.chart.py chrony/chrony.chart.py \
- couchdb/couchdb.chart.py dnsdist/dnsdist.chart.py \
- dns_query_time/dns_query_time.chart.py \
- dockerd/dockerd.chart.py dovecot/dovecot.chart.py \
- elasticsearch/elasticsearch.chart.py energid/energid.chart.py \
- example/example.chart.py exim/exim.chart.py \
- fail2ban/fail2ban.chart.py freeradius/freeradius.chart.py \
- go_expvar/go_expvar.chart.py haproxy/haproxy.chart.py \
- hddtemp/hddtemp.chart.py httpcheck/httpcheck.chart.py \
- icecast/icecast.chart.py ipfs/ipfs.chart.py \
- isc_dhcpd/isc_dhcpd.chart.py litespeed/litespeed.chart.py \
- logind/logind.chart.py megacli/megacli.chart.py \
- memcached/memcached.chart.py mongodb/mongodb.chart.py \
- monit/monit.chart.py mysql/mysql.chart.py nginx/nginx.chart.py \
- nginx_plus/nginx_plus.chart.py nvidia_smi/nvidia_smi.chart.py \
- nsd/nsd.chart.py ntpd/ntpd.chart.py \
- ovpn_status_log/ovpn_status_log.chart.py \
- openldap/openldap.chart.py oracledb/oracledb.chart.py \
- phpfpm/phpfpm.chart.py portcheck/portcheck.chart.py \
- postfix/postfix.chart.py postgres/postgres.chart.py \
- powerdns/powerdns.chart.py proxysql/proxysql.chart.py \
- puppet/puppet.chart.py rabbitmq/rabbitmq.chart.py \
- redis/redis.chart.py rethinkdbs/rethinkdbs.chart.py \
- retroshare/retroshare.chart.py riakkv/riakkv.chart.py \
- samba/samba.chart.py sensors/sensors.chart.py \
- smartd_log/smartd_log.chart.py spigotmc/spigotmc.chart.py \
- springboot/springboot.chart.py squid/squid.chart.py \
- tomcat/tomcat.chart.py tor/tor.chart.py \
- traefik/traefik.chart.py unbound/unbound.chart.py \
- uwsgi/uwsgi.chart.py varnish/varnish.chart.py \
- w1sensor/w1sensor.chart.py web_log/web_log.chart.py
-userpythonconfigdir = $(configdir)/python.d
-dist_userpythonconfig_DATA = \
- .keep \
- $(NULL)
-
-pythonconfigdir = $(libconfigdir)/python.d
-dist_pythonconfig_DATA = $(NULL) adaptec_raid/adaptec_raid.conf \
- apache/apache.conf beanstalk/beanstalk.conf \
- bind_rndc/bind_rndc.conf boinc/boinc.conf ceph/ceph.conf \
- chrony/chrony.conf couchdb/couchdb.conf dnsdist/dnsdist.conf \
- dns_query_time/dns_query_time.conf dockerd/dockerd.conf \
- dovecot/dovecot.conf elasticsearch/elasticsearch.conf \
- energid/energid.conf example/example.conf exim/exim.conf \
- fail2ban/fail2ban.conf freeradius/freeradius.conf \
- go_expvar/go_expvar.conf haproxy/haproxy.conf \
- hddtemp/hddtemp.conf httpcheck/httpcheck.conf \
- icecast/icecast.conf ipfs/ipfs.conf isc_dhcpd/isc_dhcpd.conf \
- litespeed/litespeed.conf logind/logind.conf \
- megacli/megacli.conf memcached/memcached.conf \
- mongodb/mongodb.conf monit/monit.conf mysql/mysql.conf \
- nginx/nginx.conf nginx_plus/nginx_plus.conf \
- nvidia_smi/nvidia_smi.conf nsd/nsd.conf ntpd/ntpd.conf \
- ovpn_status_log/ovpn_status_log.conf openldap/openldap.conf \
- oracledb/oracledb.conf phpfpm/phpfpm.conf \
- portcheck/portcheck.conf postfix/postfix.conf \
- postgres/postgres.conf powerdns/powerdns.conf \
- proxysql/proxysql.conf puppet/puppet.conf \
- rabbitmq/rabbitmq.conf redis/redis.conf \
- rethinkdbs/rethinkdbs.conf retroshare/retroshare.conf \
- riakkv/riakkv.conf samba/samba.conf sensors/sensors.conf \
- smartd_log/smartd_log.conf spigotmc/spigotmc.conf \
- springboot/springboot.conf squid/squid.conf tomcat/tomcat.conf \
- tor/tor.conf traefik/traefik.conf unbound/unbound.conf \
- uwsgi/uwsgi.conf varnish/varnish.conf w1sensor/w1sensor.conf \
- web_log/web_log.conf
-pythonmodulesdir = $(pythondir)/python_modules
-dist_pythonmodules_DATA = \
- python_modules/__init__.py \
- $(NULL)
-
-basesdir = $(pythonmodulesdir)/bases
-dist_bases_DATA = \
- python_modules/bases/__init__.py \
- python_modules/bases/charts.py \
- python_modules/bases/collection.py \
- python_modules/bases/loaders.py \
- python_modules/bases/loggers.py \
- $(NULL)
-
-bases_framework_servicesdir = $(basesdir)/FrameworkServices
-dist_bases_framework_services_DATA = \
- python_modules/bases/FrameworkServices/__init__.py \
- python_modules/bases/FrameworkServices/ExecutableService.py \
- python_modules/bases/FrameworkServices/LogService.py \
- python_modules/bases/FrameworkServices/MySQLService.py \
- python_modules/bases/FrameworkServices/SimpleService.py \
- python_modules/bases/FrameworkServices/SocketService.py \
- python_modules/bases/FrameworkServices/UrlService.py \
- $(NULL)
-
-third_partydir = $(pythonmodulesdir)/third_party
-dist_third_party_DATA = \
- python_modules/third_party/__init__.py \
- python_modules/third_party/ordereddict.py \
- python_modules/third_party/lm_sensors.py \
- python_modules/third_party/mcrcon.py \
- python_modules/third_party/boinc_client.py \
- python_modules/third_party/monotonic.py \
- $(NULL)
-
-pythonyaml2dir = $(pythonmodulesdir)/pyyaml2
-dist_pythonyaml2_DATA = \
- python_modules/pyyaml2/__init__.py \
- python_modules/pyyaml2/composer.py \
- python_modules/pyyaml2/constructor.py \
- python_modules/pyyaml2/cyaml.py \
- python_modules/pyyaml2/dumper.py \
- python_modules/pyyaml2/emitter.py \
- python_modules/pyyaml2/error.py \
- python_modules/pyyaml2/events.py \
- python_modules/pyyaml2/loader.py \
- python_modules/pyyaml2/nodes.py \
- python_modules/pyyaml2/parser.py \
- python_modules/pyyaml2/reader.py \
- python_modules/pyyaml2/representer.py \
- python_modules/pyyaml2/resolver.py \
- python_modules/pyyaml2/scanner.py \
- python_modules/pyyaml2/serializer.py \
- python_modules/pyyaml2/tokens.py \
- $(NULL)
-
-pythonyaml3dir = $(pythonmodulesdir)/pyyaml3
-dist_pythonyaml3_DATA = \
- python_modules/pyyaml3/__init__.py \
- python_modules/pyyaml3/composer.py \
- python_modules/pyyaml3/constructor.py \
- python_modules/pyyaml3/cyaml.py \
- python_modules/pyyaml3/dumper.py \
- python_modules/pyyaml3/emitter.py \
- python_modules/pyyaml3/error.py \
- python_modules/pyyaml3/events.py \
- python_modules/pyyaml3/loader.py \
- python_modules/pyyaml3/nodes.py \
- python_modules/pyyaml3/parser.py \
- python_modules/pyyaml3/reader.py \
- python_modules/pyyaml3/representer.py \
- python_modules/pyyaml3/resolver.py \
- python_modules/pyyaml3/scanner.py \
- python_modules/pyyaml3/serializer.py \
- python_modules/pyyaml3/tokens.py \
- $(NULL)
-
-python_urllib3dir = $(pythonmodulesdir)/urllib3
-dist_python_urllib3_DATA = \
- python_modules/urllib3/__init__.py \
- python_modules/urllib3/_collections.py \
- python_modules/urllib3/connection.py \
- python_modules/urllib3/connectionpool.py \
- python_modules/urllib3/exceptions.py \
- python_modules/urllib3/fields.py \
- python_modules/urllib3/filepost.py \
- python_modules/urllib3/response.py \
- python_modules/urllib3/poolmanager.py \
- python_modules/urllib3/request.py \
- $(NULL)
-
-python_urllib3_utildir = $(python_urllib3dir)/util
-dist_python_urllib3_util_DATA = \
- python_modules/urllib3/util/__init__.py \
- python_modules/urllib3/util/connection.py \
- python_modules/urllib3/util/request.py \
- python_modules/urllib3/util/response.py \
- python_modules/urllib3/util/retry.py \
- python_modules/urllib3/util/selectors.py \
- python_modules/urllib3/util/ssl_.py \
- python_modules/urllib3/util/timeout.py \
- python_modules/urllib3/util/url.py \
- python_modules/urllib3/util/wait.py \
- $(NULL)
-
-python_urllib3_packagesdir = $(python_urllib3dir)/packages
-dist_python_urllib3_packages_DATA = \
- python_modules/urllib3/packages/__init__.py \
- python_modules/urllib3/packages/ordered_dict.py \
- python_modules/urllib3/packages/six.py \
- $(NULL)
-
-python_urllib3_backportsdir = $(python_urllib3_packagesdir)/backports
-dist_python_urllib3_backports_DATA = \
- python_modules/urllib3/packages/backports/__init__.py \
- python_modules/urllib3/packages/backports/makefile.py \
- $(NULL)
-
-python_urllib3_ssl_match_hostnamedir = $(python_urllib3_packagesdir)/ssl_match_hostname
-dist_python_urllib3_ssl_match_hostname_DATA = \
- python_modules/urllib3/packages/ssl_match_hostname/__init__.py \
- python_modules/urllib3/packages/ssl_match_hostname/_implementation.py \
- $(NULL)
-
-python_urllib3_contribdir = $(python_urllib3dir)/contrib
-dist_python_urllib3_contrib_DATA = \
- python_modules/urllib3/contrib/__init__.py \
- python_modules/urllib3/contrib/appengine.py \
- python_modules/urllib3/contrib/ntlmpool.py \
- python_modules/urllib3/contrib/pyopenssl.py \
- python_modules/urllib3/contrib/securetransport.py \
- python_modules/urllib3/contrib/socks.py \
- $(NULL)
-
-python_urllib3_securetransportdir = $(python_urllib3_contribdir)/_securetransport
-dist_python_urllib3_securetransport_DATA = \
- python_modules/urllib3/contrib/_securetransport/__init__.py \
- python_modules/urllib3/contrib/_securetransport/bindings.py \
- python_modules/urllib3/contrib/_securetransport/low_level.py \
- $(NULL)
-
-all: all-am
-
-.SUFFIXES:
-.SUFFIXES: .in
-$(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(top_srcdir)/build/subst.inc $(srcdir)/adaptec_raid/Makefile.inc $(srcdir)/apache/Makefile.inc $(srcdir)/beanstalk/Makefile.inc $(srcdir)/bind_rndc/Makefile.inc $(srcdir)/boinc/Makefile.inc $(srcdir)/ceph/Makefile.inc $(srcdir)/chrony/Makefile.inc $(srcdir)/couchdb/Makefile.inc $(srcdir)/dnsdist/Makefile.inc $(srcdir)/dns_query_time/Makefile.inc $(srcdir)/dockerd/Makefile.inc $(srcdir)/dovecot/Makefile.inc $(srcdir)/elasticsearch/Makefile.inc $(srcdir)/energid/Makefile.inc $(srcdir)/example/Makefile.inc $(srcdir)/exim/Makefile.inc $(srcdir)/fail2ban/Makefile.inc $(srcdir)/freeradius/Makefile.inc $(srcdir)/go_expvar/Makefile.inc $(srcdir)/haproxy/Makefile.inc $(srcdir)/hddtemp/Makefile.inc $(srcdir)/httpcheck/Makefile.inc $(srcdir)/icecast/Makefile.inc $(srcdir)/ipfs/Makefile.inc $(srcdir)/isc_dhcpd/Makefile.inc $(srcdir)/litespeed/Makefile.inc $(srcdir)/logind/Makefile.inc $(srcdir)/megacli/Makefile.inc $(srcdir)/memcached/Makefile.inc $(srcdir)/mongodb/Makefile.inc $(srcdir)/monit/Makefile.inc $(srcdir)/mysql/Makefile.inc $(srcdir)/nginx/Makefile.inc $(srcdir)/nginx_plus/Makefile.inc $(srcdir)/nvidia_smi/Makefile.inc $(srcdir)/nsd/Makefile.inc $(srcdir)/ntpd/Makefile.inc $(srcdir)/ovpn_status_log/Makefile.inc $(srcdir)/openldap/Makefile.inc $(srcdir)/oracledb/Makefile.inc $(srcdir)/phpfpm/Makefile.inc $(srcdir)/portcheck/Makefile.inc $(srcdir)/postfix/Makefile.inc $(srcdir)/postgres/Makefile.inc $(srcdir)/powerdns/Makefile.inc $(srcdir)/proxysql/Makefile.inc $(srcdir)/puppet/Makefile.inc $(srcdir)/rabbitmq/Makefile.inc $(srcdir)/redis/Makefile.inc $(srcdir)/rethinkdbs/Makefile.inc $(srcdir)/retroshare/Makefile.inc $(srcdir)/riakkv/Makefile.inc $(srcdir)/samba/Makefile.inc $(srcdir)/sensors/Makefile.inc $(srcdir)/smartd_log/Makefile.inc $(srcdir)/spigotmc/Makefile.inc $(srcdir)/springboot/Makefile.inc $(srcdir)/squid/Makefile.inc $(srcdir)/tomcat/Makefile.inc $(srcdir)/tor/Makefile.inc $(srcdir)/traefik/Makefile.inc $(srcdir)/unbound/Makefile.inc $(srcdir)/uwsgi/Makefile.inc $(srcdir)/varnish/Makefile.inc $(srcdir)/w1sensor/Makefile.inc $(srcdir)/web_log/Makefile.inc $(am__configure_deps)
- @for dep in $?; do \
- case '$(am__configure_deps)' in \
- *$$dep*) \
- ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
- && { if test -f $@; then exit 0; else break; fi; }; \
- exit 1;; \
- esac; \
- done; \
- echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu collectors/python.d.plugin/Makefile'; \
- $(am__cd) $(top_srcdir) && \
- $(AUTOMAKE) --gnu collectors/python.d.plugin/Makefile
-Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
- @case '$?' in \
- *config.status*) \
- cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
- *) \
- echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
- cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
- esac;
-$(top_srcdir)/build/subst.inc $(srcdir)/adaptec_raid/Makefile.inc $(srcdir)/apache/Makefile.inc $(srcdir)/beanstalk/Makefile.inc $(srcdir)/bind_rndc/Makefile.inc $(srcdir)/boinc/Makefile.inc $(srcdir)/ceph/Makefile.inc $(srcdir)/chrony/Makefile.inc $(srcdir)/couchdb/Makefile.inc $(srcdir)/dnsdist/Makefile.inc $(srcdir)/dns_query_time/Makefile.inc $(srcdir)/dockerd/Makefile.inc $(srcdir)/dovecot/Makefile.inc $(srcdir)/elasticsearch/Makefile.inc $(srcdir)/energid/Makefile.inc $(srcdir)/example/Makefile.inc $(srcdir)/exim/Makefile.inc $(srcdir)/fail2ban/Makefile.inc $(srcdir)/freeradius/Makefile.inc $(srcdir)/go_expvar/Makefile.inc $(srcdir)/haproxy/Makefile.inc $(srcdir)/hddtemp/Makefile.inc $(srcdir)/httpcheck/Makefile.inc $(srcdir)/icecast/Makefile.inc $(srcdir)/ipfs/Makefile.inc $(srcdir)/isc_dhcpd/Makefile.inc $(srcdir)/litespeed/Makefile.inc $(srcdir)/logind/Makefile.inc $(srcdir)/megacli/Makefile.inc $(srcdir)/memcached/Makefile.inc $(srcdir)/mongodb/Makefile.inc $(srcdir)/monit/Makefile.inc $(srcdir)/mysql/Makefile.inc $(srcdir)/nginx/Makefile.inc $(srcdir)/nginx_plus/Makefile.inc $(srcdir)/nvidia_smi/Makefile.inc $(srcdir)/nsd/Makefile.inc $(srcdir)/ntpd/Makefile.inc $(srcdir)/ovpn_status_log/Makefile.inc $(srcdir)/openldap/Makefile.inc $(srcdir)/oracledb/Makefile.inc $(srcdir)/phpfpm/Makefile.inc $(srcdir)/portcheck/Makefile.inc $(srcdir)/postfix/Makefile.inc $(srcdir)/postgres/Makefile.inc $(srcdir)/powerdns/Makefile.inc $(srcdir)/proxysql/Makefile.inc $(srcdir)/puppet/Makefile.inc $(srcdir)/rabbitmq/Makefile.inc $(srcdir)/redis/Makefile.inc $(srcdir)/rethinkdbs/Makefile.inc $(srcdir)/retroshare/Makefile.inc $(srcdir)/riakkv/Makefile.inc $(srcdir)/samba/Makefile.inc $(srcdir)/sensors/Makefile.inc $(srcdir)/smartd_log/Makefile.inc $(srcdir)/spigotmc/Makefile.inc $(srcdir)/springboot/Makefile.inc $(srcdir)/squid/Makefile.inc $(srcdir)/tomcat/Makefile.inc $(srcdir)/tor/Makefile.inc $(srcdir)/traefik/Makefile.inc $(srcdir)/unbound/Makefile.inc $(srcdir)/uwsgi/Makefile.inc $(srcdir)/varnish/Makefile.inc $(srcdir)/w1sensor/Makefile.inc $(srcdir)/web_log/Makefile.inc $(am__empty):
-
-$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
- cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
-
-$(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps)
- cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
-$(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps)
- cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
-$(am__aclocal_m4_deps):
-install-dist_pluginsSCRIPTS: $(dist_plugins_SCRIPTS)
- @$(NORMAL_INSTALL)
- @list='$(dist_plugins_SCRIPTS)'; test -n "$(pluginsdir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(pluginsdir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(pluginsdir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \
- done | \
- sed -e 'p;s,.*/,,;n' \
- -e 'h;s|.*|.|' \
- -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \
- $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \
- { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \
- if ($$2 == $$4) { files[d] = files[d] " " $$1; \
- if (++n[d] == $(am__install_max)) { \
- print "f", d, files[d]; n[d] = 0; files[d] = "" } } \
- else { print "f", d "/" $$4, $$1 } } \
- END { for (d in files) print "f", d, files[d] }' | \
- while read type dir files; do \
- if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \
- test -z "$$files" || { \
- echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(pluginsdir)$$dir'"; \
- $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(pluginsdir)$$dir" || exit $$?; \
- } \
- ; done
-
-uninstall-dist_pluginsSCRIPTS:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_plugins_SCRIPTS)'; test -n "$(pluginsdir)" || exit 0; \
- files=`for p in $$list; do echo "$$p"; done | \
- sed -e 's,.*/,,;$(transform)'`; \
- dir='$(DESTDIR)$(pluginsdir)'; $(am__uninstall_files_from_dir)
-install-dist_pythonSCRIPTS: $(dist_python_SCRIPTS)
- @$(NORMAL_INSTALL)
- @list='$(dist_python_SCRIPTS)'; test -n "$(pythondir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(pythondir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(pythondir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \
- done | \
- sed -e 'p;s,.*/,,;n' \
- -e 'h;s|.*|.|' \
- -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \
- $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \
- { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \
- if ($$2 == $$4) { files[d] = files[d] " " $$1; \
- if (++n[d] == $(am__install_max)) { \
- print "f", d, files[d]; n[d] = 0; files[d] = "" } } \
- else { print "f", d "/" $$4, $$1 } } \
- END { for (d in files) print "f", d, files[d] }' | \
- while read type dir files; do \
- if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \
- test -z "$$files" || { \
- echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(pythondir)$$dir'"; \
- $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(pythondir)$$dir" || exit $$?; \
- } \
- ; done
-
-uninstall-dist_pythonSCRIPTS:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_python_SCRIPTS)'; test -n "$(pythondir)" || exit 0; \
- files=`for p in $$list; do echo "$$p"; done | \
- sed -e 's,.*/,,;$(transform)'`; \
- dir='$(DESTDIR)$(pythondir)'; $(am__uninstall_files_from_dir)
-install-dist_basesDATA: $(dist_bases_DATA)
- @$(NORMAL_INSTALL)
- @list='$(dist_bases_DATA)'; test -n "$(basesdir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(basesdir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(basesdir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(basesdir)'"; \
- $(INSTALL_DATA) $$files "$(DESTDIR)$(basesdir)" || exit $$?; \
- done
-
-uninstall-dist_basesDATA:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_bases_DATA)'; test -n "$(basesdir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(basesdir)'; $(am__uninstall_files_from_dir)
-install-dist_bases_framework_servicesDATA: $(dist_bases_framework_services_DATA)
- @$(NORMAL_INSTALL)
- @list='$(dist_bases_framework_services_DATA)'; test -n "$(bases_framework_servicesdir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(bases_framework_servicesdir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(bases_framework_servicesdir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(bases_framework_servicesdir)'"; \
- $(INSTALL_DATA) $$files "$(DESTDIR)$(bases_framework_servicesdir)" || exit $$?; \
- done
-
-uninstall-dist_bases_framework_servicesDATA:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_bases_framework_services_DATA)'; test -n "$(bases_framework_servicesdir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(bases_framework_servicesdir)'; $(am__uninstall_files_from_dir)
-install-dist_libconfigDATA: $(dist_libconfig_DATA)
- @$(NORMAL_INSTALL)
- @list='$(dist_libconfig_DATA)'; test -n "$(libconfigdir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(libconfigdir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(libconfigdir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(libconfigdir)'"; \
- $(INSTALL_DATA) $$files "$(DESTDIR)$(libconfigdir)" || exit $$?; \
- done
-
-uninstall-dist_libconfigDATA:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_libconfig_DATA)'; test -n "$(libconfigdir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(libconfigdir)'; $(am__uninstall_files_from_dir)
-install-dist_pythonDATA: $(dist_python_DATA)
- @$(NORMAL_INSTALL)
- @list='$(dist_python_DATA)'; test -n "$(pythondir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(pythondir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(pythondir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pythondir)'"; \
- $(INSTALL_DATA) $$files "$(DESTDIR)$(pythondir)" || exit $$?; \
- done
-
-uninstall-dist_pythonDATA:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_python_DATA)'; test -n "$(pythondir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(pythondir)'; $(am__uninstall_files_from_dir)
-install-dist_python_urllib3DATA: $(dist_python_urllib3_DATA)
- @$(NORMAL_INSTALL)
- @list='$(dist_python_urllib3_DATA)'; test -n "$(python_urllib3dir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(python_urllib3dir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(python_urllib3dir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(python_urllib3dir)'"; \
- $(INSTALL_DATA) $$files "$(DESTDIR)$(python_urllib3dir)" || exit $$?; \
- done
-
-uninstall-dist_python_urllib3DATA:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_python_urllib3_DATA)'; test -n "$(python_urllib3dir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(python_urllib3dir)'; $(am__uninstall_files_from_dir)
-install-dist_python_urllib3_backportsDATA: $(dist_python_urllib3_backports_DATA)
- @$(NORMAL_INSTALL)
- @list='$(dist_python_urllib3_backports_DATA)'; test -n "$(python_urllib3_backportsdir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(python_urllib3_backportsdir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(python_urllib3_backportsdir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(python_urllib3_backportsdir)'"; \
- $(INSTALL_DATA) $$files "$(DESTDIR)$(python_urllib3_backportsdir)" || exit $$?; \
- done
-
-uninstall-dist_python_urllib3_backportsDATA:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_python_urllib3_backports_DATA)'; test -n "$(python_urllib3_backportsdir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(python_urllib3_backportsdir)'; $(am__uninstall_files_from_dir)
-install-dist_python_urllib3_contribDATA: $(dist_python_urllib3_contrib_DATA)
- @$(NORMAL_INSTALL)
- @list='$(dist_python_urllib3_contrib_DATA)'; test -n "$(python_urllib3_contribdir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(python_urllib3_contribdir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(python_urllib3_contribdir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(python_urllib3_contribdir)'"; \
- $(INSTALL_DATA) $$files "$(DESTDIR)$(python_urllib3_contribdir)" || exit $$?; \
- done
-
-uninstall-dist_python_urllib3_contribDATA:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_python_urllib3_contrib_DATA)'; test -n "$(python_urllib3_contribdir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(python_urllib3_contribdir)'; $(am__uninstall_files_from_dir)
-install-dist_python_urllib3_packagesDATA: $(dist_python_urllib3_packages_DATA)
- @$(NORMAL_INSTALL)
- @list='$(dist_python_urllib3_packages_DATA)'; test -n "$(python_urllib3_packagesdir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(python_urllib3_packagesdir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(python_urllib3_packagesdir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(python_urllib3_packagesdir)'"; \
- $(INSTALL_DATA) $$files "$(DESTDIR)$(python_urllib3_packagesdir)" || exit $$?; \
- done
-
-uninstall-dist_python_urllib3_packagesDATA:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_python_urllib3_packages_DATA)'; test -n "$(python_urllib3_packagesdir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(python_urllib3_packagesdir)'; $(am__uninstall_files_from_dir)
-install-dist_python_urllib3_securetransportDATA: $(dist_python_urllib3_securetransport_DATA)
- @$(NORMAL_INSTALL)
- @list='$(dist_python_urllib3_securetransport_DATA)'; test -n "$(python_urllib3_securetransportdir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(python_urllib3_securetransportdir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(python_urllib3_securetransportdir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(python_urllib3_securetransportdir)'"; \
- $(INSTALL_DATA) $$files "$(DESTDIR)$(python_urllib3_securetransportdir)" || exit $$?; \
- done
-
-uninstall-dist_python_urllib3_securetransportDATA:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_python_urllib3_securetransport_DATA)'; test -n "$(python_urllib3_securetransportdir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(python_urllib3_securetransportdir)'; $(am__uninstall_files_from_dir)
-install-dist_python_urllib3_ssl_match_hostnameDATA: $(dist_python_urllib3_ssl_match_hostname_DATA)
- @$(NORMAL_INSTALL)
- @list='$(dist_python_urllib3_ssl_match_hostname_DATA)'; test -n "$(python_urllib3_ssl_match_hostnamedir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(python_urllib3_ssl_match_hostnamedir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(python_urllib3_ssl_match_hostnamedir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(python_urllib3_ssl_match_hostnamedir)'"; \
- $(INSTALL_DATA) $$files "$(DESTDIR)$(python_urllib3_ssl_match_hostnamedir)" || exit $$?; \
- done
-
-uninstall-dist_python_urllib3_ssl_match_hostnameDATA:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_python_urllib3_ssl_match_hostname_DATA)'; test -n "$(python_urllib3_ssl_match_hostnamedir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(python_urllib3_ssl_match_hostnamedir)'; $(am__uninstall_files_from_dir)
-install-dist_python_urllib3_utilDATA: $(dist_python_urllib3_util_DATA)
- @$(NORMAL_INSTALL)
- @list='$(dist_python_urllib3_util_DATA)'; test -n "$(python_urllib3_utildir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(python_urllib3_utildir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(python_urllib3_utildir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(python_urllib3_utildir)'"; \
- $(INSTALL_DATA) $$files "$(DESTDIR)$(python_urllib3_utildir)" || exit $$?; \
- done
-
-uninstall-dist_python_urllib3_utilDATA:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_python_urllib3_util_DATA)'; test -n "$(python_urllib3_utildir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(python_urllib3_utildir)'; $(am__uninstall_files_from_dir)
-install-dist_pythonconfigDATA: $(dist_pythonconfig_DATA)
- @$(NORMAL_INSTALL)
- @list='$(dist_pythonconfig_DATA)'; test -n "$(pythonconfigdir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(pythonconfigdir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(pythonconfigdir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pythonconfigdir)'"; \
- $(INSTALL_DATA) $$files "$(DESTDIR)$(pythonconfigdir)" || exit $$?; \
- done
-
-uninstall-dist_pythonconfigDATA:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_pythonconfig_DATA)'; test -n "$(pythonconfigdir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(pythonconfigdir)'; $(am__uninstall_files_from_dir)
-install-dist_pythonmodulesDATA: $(dist_pythonmodules_DATA)
- @$(NORMAL_INSTALL)
- @list='$(dist_pythonmodules_DATA)'; test -n "$(pythonmodulesdir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(pythonmodulesdir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(pythonmodulesdir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pythonmodulesdir)'"; \
- $(INSTALL_DATA) $$files "$(DESTDIR)$(pythonmodulesdir)" || exit $$?; \
- done
-
-uninstall-dist_pythonmodulesDATA:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_pythonmodules_DATA)'; test -n "$(pythonmodulesdir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(pythonmodulesdir)'; $(am__uninstall_files_from_dir)
-install-dist_pythonyaml2DATA: $(dist_pythonyaml2_DATA)
- @$(NORMAL_INSTALL)
- @list='$(dist_pythonyaml2_DATA)'; test -n "$(pythonyaml2dir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(pythonyaml2dir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(pythonyaml2dir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pythonyaml2dir)'"; \
- $(INSTALL_DATA) $$files "$(DESTDIR)$(pythonyaml2dir)" || exit $$?; \
- done
-
-uninstall-dist_pythonyaml2DATA:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_pythonyaml2_DATA)'; test -n "$(pythonyaml2dir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(pythonyaml2dir)'; $(am__uninstall_files_from_dir)
-install-dist_pythonyaml3DATA: $(dist_pythonyaml3_DATA)
- @$(NORMAL_INSTALL)
- @list='$(dist_pythonyaml3_DATA)'; test -n "$(pythonyaml3dir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(pythonyaml3dir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(pythonyaml3dir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pythonyaml3dir)'"; \
- $(INSTALL_DATA) $$files "$(DESTDIR)$(pythonyaml3dir)" || exit $$?; \
- done
-
-uninstall-dist_pythonyaml3DATA:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_pythonyaml3_DATA)'; test -n "$(pythonyaml3dir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(pythonyaml3dir)'; $(am__uninstall_files_from_dir)
-install-dist_third_partyDATA: $(dist_third_party_DATA)
- @$(NORMAL_INSTALL)
- @list='$(dist_third_party_DATA)'; test -n "$(third_partydir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(third_partydir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(third_partydir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(third_partydir)'"; \
- $(INSTALL_DATA) $$files "$(DESTDIR)$(third_partydir)" || exit $$?; \
- done
-
-uninstall-dist_third_partyDATA:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_third_party_DATA)'; test -n "$(third_partydir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(third_partydir)'; $(am__uninstall_files_from_dir)
-install-dist_userpythonconfigDATA: $(dist_userpythonconfig_DATA)
- @$(NORMAL_INSTALL)
- @list='$(dist_userpythonconfig_DATA)'; test -n "$(userpythonconfigdir)" || list=; \
- if test -n "$$list"; then \
- echo " $(MKDIR_P) '$(DESTDIR)$(userpythonconfigdir)'"; \
- $(MKDIR_P) "$(DESTDIR)$(userpythonconfigdir)" || exit 1; \
- fi; \
- for p in $$list; do \
- if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
- echo "$$d$$p"; \
- done | $(am__base_list) | \
- while read files; do \
- echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(userpythonconfigdir)'"; \
- $(INSTALL_DATA) $$files "$(DESTDIR)$(userpythonconfigdir)" || exit $$?; \
- done
-
-uninstall-dist_userpythonconfigDATA:
- @$(NORMAL_UNINSTALL)
- @list='$(dist_userpythonconfig_DATA)'; test -n "$(userpythonconfigdir)" || list=; \
- files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
- dir='$(DESTDIR)$(userpythonconfigdir)'; $(am__uninstall_files_from_dir)
-tags TAGS:
-
-ctags CTAGS:
-
-cscope cscopelist:
-
-
-distdir: $(DISTFILES)
- @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
- topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
- list='$(DISTFILES)'; \
- dist_files=`for file in $$list; do echo $$file; done | \
- sed -e "s|^$$srcdirstrip/||;t" \
- -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
- case $$dist_files in \
- */*) $(MKDIR_P) `echo "$$dist_files" | \
- sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
- sort -u` ;; \
- esac; \
- for file in $$dist_files; do \
- if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
- if test -d $$d/$$file; then \
- dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
- if test -d "$(distdir)/$$file"; then \
- find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
- fi; \
- if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
- cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
- find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
- fi; \
- cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
- else \
- test -f "$(distdir)/$$file" \
- || cp -p $$d/$$file "$(distdir)/$$file" \
- || exit 1; \
- fi; \
- done
-check-am: all-am
-check: check-am
-all-am: Makefile $(SCRIPTS) $(DATA)
-installdirs:
- for dir in "$(DESTDIR)$(pluginsdir)" "$(DESTDIR)$(pythondir)" "$(DESTDIR)$(basesdir)" "$(DESTDIR)$(bases_framework_servicesdir)" "$(DESTDIR)$(libconfigdir)" "$(DESTDIR)$(pythondir)" "$(DESTDIR)$(python_urllib3dir)" "$(DESTDIR)$(python_urllib3_backportsdir)" "$(DESTDIR)$(python_urllib3_contribdir)" "$(DESTDIR)$(python_urllib3_packagesdir)" "$(DESTDIR)$(python_urllib3_securetransportdir)" "$(DESTDIR)$(python_urllib3_ssl_match_hostnamedir)" "$(DESTDIR)$(python_urllib3_utildir)" "$(DESTDIR)$(pythonconfigdir)" "$(DESTDIR)$(pythonmodulesdir)" "$(DESTDIR)$(pythonyaml2dir)" "$(DESTDIR)$(pythonyaml3dir)" "$(DESTDIR)$(third_partydir)" "$(DESTDIR)$(userpythonconfigdir)"; do \
- test -z "$$dir" || $(MKDIR_P) "$$dir"; \
- done
-install: install-am
-install-exec: install-exec-am
-install-data: install-data-am
-uninstall: uninstall-am
-
-install-am: all-am
- @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
-
-installcheck: installcheck-am
-install-strip:
- if test -z '$(STRIP)'; then \
- $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
- install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
- install; \
- else \
- $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
- install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
- "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
- fi
-mostlyclean-generic:
-
-clean-generic:
- -test -z "$(CLEANFILES)" || rm -f $(CLEANFILES)
-
-distclean-generic:
- -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
- -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
-
-maintainer-clean-generic:
- @echo "This command is intended for maintainers to use"
- @echo "it deletes files that may require special tools to rebuild."
- -test -z "$(MAINTAINERCLEANFILES)" || rm -f $(MAINTAINERCLEANFILES)
-clean: clean-am
-
-clean-am: clean-generic mostlyclean-am
-
-distclean: distclean-am
- -rm -f Makefile
-distclean-am: clean-am distclean-generic
-
-dvi: dvi-am
-
-dvi-am:
-
-html: html-am
-
-html-am:
-
-info: info-am
-
-info-am:
-
-install-data-am: install-dist_basesDATA \
- install-dist_bases_framework_servicesDATA \
- install-dist_libconfigDATA install-dist_pluginsSCRIPTS \
- install-dist_pythonDATA install-dist_pythonSCRIPTS \
- install-dist_python_urllib3DATA \
- install-dist_python_urllib3_backportsDATA \
- install-dist_python_urllib3_contribDATA \
- install-dist_python_urllib3_packagesDATA \
- install-dist_python_urllib3_securetransportDATA \
- install-dist_python_urllib3_ssl_match_hostnameDATA \
- install-dist_python_urllib3_utilDATA \
- install-dist_pythonconfigDATA install-dist_pythonmodulesDATA \
- install-dist_pythonyaml2DATA install-dist_pythonyaml3DATA \
- install-dist_third_partyDATA install-dist_userpythonconfigDATA
-
-install-dvi: install-dvi-am
-
-install-dvi-am:
-
-install-exec-am:
-
-install-html: install-html-am
-
-install-html-am:
-
-install-info: install-info-am
-
-install-info-am:
-
-install-man:
-
-install-pdf: install-pdf-am
-
-install-pdf-am:
-
-install-ps: install-ps-am
-
-install-ps-am:
-
-installcheck-am:
-
-maintainer-clean: maintainer-clean-am
- -rm -f Makefile
-maintainer-clean-am: distclean-am maintainer-clean-generic
-
-mostlyclean: mostlyclean-am
-
-mostlyclean-am: mostlyclean-generic
-
-pdf: pdf-am
-
-pdf-am:
-
-ps: ps-am
-
-ps-am:
-
-uninstall-am: uninstall-dist_basesDATA \
- uninstall-dist_bases_framework_servicesDATA \
- uninstall-dist_libconfigDATA uninstall-dist_pluginsSCRIPTS \
- uninstall-dist_pythonDATA uninstall-dist_pythonSCRIPTS \
- uninstall-dist_python_urllib3DATA \
- uninstall-dist_python_urllib3_backportsDATA \
- uninstall-dist_python_urllib3_contribDATA \
- uninstall-dist_python_urllib3_packagesDATA \
- uninstall-dist_python_urllib3_securetransportDATA \
- uninstall-dist_python_urllib3_ssl_match_hostnameDATA \
- uninstall-dist_python_urllib3_utilDATA \
- uninstall-dist_pythonconfigDATA \
- uninstall-dist_pythonmodulesDATA \
- uninstall-dist_pythonyaml2DATA uninstall-dist_pythonyaml3DATA \
- uninstall-dist_third_partyDATA \
- uninstall-dist_userpythonconfigDATA
-
-.MAKE: install-am install-strip
-
-.PHONY: all all-am check check-am clean clean-generic cscopelist-am \
- ctags-am distclean distclean-generic distdir dvi dvi-am html \
- html-am info info-am install install-am install-data \
- install-data-am install-dist_basesDATA \
- install-dist_bases_framework_servicesDATA \
- install-dist_libconfigDATA install-dist_pluginsSCRIPTS \
- install-dist_pythonDATA install-dist_pythonSCRIPTS \
- install-dist_python_urllib3DATA \
- install-dist_python_urllib3_backportsDATA \
- install-dist_python_urllib3_contribDATA \
- install-dist_python_urllib3_packagesDATA \
- install-dist_python_urllib3_securetransportDATA \
- install-dist_python_urllib3_ssl_match_hostnameDATA \
- install-dist_python_urllib3_utilDATA \
- install-dist_pythonconfigDATA install-dist_pythonmodulesDATA \
- install-dist_pythonyaml2DATA install-dist_pythonyaml3DATA \
- install-dist_third_partyDATA install-dist_userpythonconfigDATA \
- install-dvi install-dvi-am install-exec install-exec-am \
- install-html install-html-am install-info install-info-am \
- install-man install-pdf install-pdf-am install-ps \
- install-ps-am install-strip installcheck installcheck-am \
- installdirs maintainer-clean maintainer-clean-generic \
- mostlyclean mostlyclean-generic pdf pdf-am ps ps-am tags-am \
- uninstall uninstall-am uninstall-dist_basesDATA \
- uninstall-dist_bases_framework_servicesDATA \
- uninstall-dist_libconfigDATA uninstall-dist_pluginsSCRIPTS \
- uninstall-dist_pythonDATA uninstall-dist_pythonSCRIPTS \
- uninstall-dist_python_urllib3DATA \
- uninstall-dist_python_urllib3_backportsDATA \
- uninstall-dist_python_urllib3_contribDATA \
- uninstall-dist_python_urllib3_packagesDATA \
- uninstall-dist_python_urllib3_securetransportDATA \
- uninstall-dist_python_urllib3_ssl_match_hostnameDATA \
- uninstall-dist_python_urllib3_utilDATA \
- uninstall-dist_pythonconfigDATA \
- uninstall-dist_pythonmodulesDATA \
- uninstall-dist_pythonyaml2DATA uninstall-dist_pythonyaml3DATA \
- uninstall-dist_third_partyDATA \
- uninstall-dist_userpythonconfigDATA
-
-.PRECIOUS: Makefile
-
-.in:
- if sed \
- -e 's#[@]localstatedir_POST@#$(localstatedir)#g' \
- -e 's#[@]sbindir_POST@#$(sbindir)#g' \
- -e 's#[@]pluginsdir_POST@#$(pluginsdir)#g' \
- -e 's#[@]configdir_POST@#$(configdir)#g' \
- -e 's#[@]libconfigdir_POST@#$(libconfigdir)#g' \
- -e 's#[@]cachedir_POST@#$(cachedir)#g' \
- -e 's#[@]registrydir_POST@#$(registrydir)#g' \
- -e 's#[@]varlibdir_POST@#$(varlibdir)#g' \
- $< > $@.tmp; then \
- mv "$@.tmp" "$@"; \
- else \
- rm -f "$@.tmp"; \
- false; \
- fi
-
-# Tell versions [3.59,3.63) of GNU make to not export all variables.
-# Otherwise a system limit (for SysV at least) may be exceeded.
-.NOEXPORT:
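The .in: rule near the end of the removed Makefile.in is what renders *.in templates (such as python.d.plugin.in below) into their final form, substituting each @name_POST@ placeholder with the matching directory chosen at configure time. A minimal Python sketch of that substitution, using the placeholder names from the rule but hypothetical directory values (the real ones come from configure):

    import re

    SUBSTITUTIONS = {
        'localstatedir_POST': '/var',
        'sbindir_POST': '/usr/sbin',
        'pluginsdir_POST': '/usr/libexec/netdata/plugins.d',
        'configdir_POST': '/etc/netdata',
        'libconfigdir_POST': '/usr/lib/netdata/conf.d',
        'cachedir_POST': '/var/cache/netdata',
        'registrydir_POST': '/var/lib/netdata/registry',
        'varlibdir_POST': '/var/lib/netdata',
    }

    def render(template_text):
        # replace every @name@ token that has a configured value, leave the rest alone
        return re.sub(
            r'@(\w+)@',
            lambda m: SUBSTITUTIONS.get(m.group(1), m.group(0)),
            template_text,
        )

    print(render("user config dir: '@configdir_POST@'"))

If the substitution fails, the rule removes the temporary output and exits non-zero, so a half-rendered script is never left in place of the real one.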
diff --git a/collectors/python.d.plugin/python.d.plugin b/collectors/python.d.plugin/python.d.plugin
deleted file mode 100644
index 468df13a4..000000000
--- a/collectors/python.d.plugin/python.d.plugin
+++ /dev/null
@@ -1,733 +0,0 @@
-#!/usr/bin/env bash
-'''':;
-if [[ "$OSTYPE" == "darwin"* ]]; then
- export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
-fi
-exec "$(command -v python || command -v python3 || command -v python2 ||
-echo "ERROR python IS NOT AVAILABLE IN THIS SYSTEM")" "$0" "$@" # '''
-
-# -*- coding: utf-8 -*-
-# Description:
-# Author: Pawel Krupa (paulfantom)
-# Author: Ilya Mashchenko (l2isbad)
-# SPDX-License-Identifier: GPL-3.0-or-later
-
-
-import collections
-import copy
-import gc
-import multiprocessing
-import os
-import re
-import sys
-import time
-import threading
-import types
-
-PY_VERSION = sys.version_info[:2]
-
-if PY_VERSION > (3, 1):
- from importlib.machinery import SourceFileLoader
-else:
- from imp import load_source as SourceFileLoader
-
-
-ENV_NETDATA_USER_CONFIG_DIR = 'NETDATA_USER_CONFIG_DIR'
-ENV_NETDATA_STOCK_CONFIG_DIR = 'NETDATA_STOCK_CONFIG_DIR'
-ENV_NETDATA_PLUGINS_DIR = 'NETDATA_PLUGINS_DIR'
-ENV_NETDATA_UPDATE_EVERY = 'NETDATA_UPDATE_EVERY'
-
-
-def dirs():
- user_config = os.getenv(
- ENV_NETDATA_USER_CONFIG_DIR,
- '/etc/netdata',
- )
- stock_config = os.getenv(
- ENV_NETDATA_STOCK_CONFIG_DIR,
- '/usr/lib/netdata/conf.d',
- )
- modules_user_config = os.path.join(user_config, 'python.d')
- modules_stock_config = os.path.join(stock_config, 'python.d')
-
- modules = os.path.abspath(
- os.getenv(
- ENV_NETDATA_PLUGINS_DIR,
- os.path.dirname(__file__),
- ) + '/../python.d'
- )
- pythond_packages = os.path.join(modules, 'python_modules')
-
- return collections.namedtuple(
- 'Dirs',
- [
- 'user_config',
- 'stock_config',
- 'modules_user_config',
- 'modules_stock_config',
- 'modules',
- 'pythond_packages',
- ]
- )(
- user_config,
- stock_config,
- modules_user_config,
- modules_stock_config,
- modules,
- pythond_packages,
- )
-
-
-DIRS = dirs()
-
-sys.path.append(DIRS.pythond_packages)
-
-
-from bases.collection import safe_print
-from bases.loggers import PythonDLogger
-from bases.loaders import load_config
-
-try:
- from collections import OrderedDict
-except ImportError:
- from third_party.ordereddict import OrderedDict
-
-
-END_TASK_MARKER = None
-
-IS_ATTY = sys.stdout.isatty()
-
-PLUGIN_CONF_FILE = 'python.d.conf'
-
-MODULE_SUFFIX = '.chart.py'
-
-OBSOLETED_MODULES = (
- 'apache_cache', # replaced by web_log
- 'cpuidle', # rewritten in C
- 'cpufreq', # rewritten in C
- 'gunicorn_log', # replaced by web_log
- 'linux_power_supply', # rewritten in C
- 'nginx_log', # replaced by web_log
- 'mdstat', # rewritten in C
- 'sslcheck', # memory leak bug https://github.com/netdata/netdata/issues/5624
-)
-
-
-AVAILABLE_MODULES = [
- m[:-len(MODULE_SUFFIX)] for m in sorted(os.listdir(DIRS.modules))
- if m.endswith(MODULE_SUFFIX) and m[:-len(MODULE_SUFFIX)] not in OBSOLETED_MODULES
-]
-
-PLUGIN_BASE_CONF = {
- 'enabled': True,
- 'default_run': True,
- 'gc_run': True,
- 'gc_interval': 300,
-}
-
-JOB_BASE_CONF = {
- 'update_every': os.getenv(ENV_NETDATA_UPDATE_EVERY, 1),
- 'priority': 60000,
- 'autodetection_retry': 0,
- 'chart_cleanup': 10,
- 'penalty': True,
- 'name': str(),
-}
-
-
-def heartbeat():
- if IS_ATTY:
- return
- safe_print('\n')
-
-
-class HeartBeat(threading.Thread):
- def __init__(self, every):
- threading.Thread.__init__(self)
- self.daemon = True
- self.every = every
-
- def run(self):
- while True:
- time.sleep(self.every)
- heartbeat()
-
-
-def load_module(name):
- abs_path = os.path.join(DIRS.modules, '{0}{1}'.format(name, MODULE_SUFFIX))
- module = SourceFileLoader(name, abs_path)
- if isinstance(module, types.ModuleType):
- return module
- return module.load_module()
-
-
-def multi_path_find(name, paths):
- for path in paths:
- abs_name = os.path.join(path, name)
- if os.path.isfile(abs_name):
- return abs_name
- return ''
-
-
-Task = collections.namedtuple(
- 'Task',
- [
- 'module_name',
- 'explicitly_enabled',
- ],
-)
-
-Result = collections.namedtuple(
- 'Result',
- [
- 'module_name',
- 'jobs_configs',
- ],
-)
-
-
-class ModuleChecker(multiprocessing.Process):
- def __init__(
- self,
- task_queue,
- result_queue,
- ):
- multiprocessing.Process.__init__(self)
- self.log = PythonDLogger()
- self.log.job_name = 'checker'
- self.task_queue = task_queue
- self.result_queue = result_queue
-
- def run(self):
- self.log.info('starting...')
- HeartBeat(1).start()
- while self.run_once():
- pass
- self.log.info('terminating...')
-
- def run_once(self):
- task = self.task_queue.get()
-
- if task is END_TASK_MARKER:
- # TODO: find better solution, understand why heartbeat thread doesn't work
- heartbeat()
- self.task_queue.task_done()
- self.result_queue.put(END_TASK_MARKER)
- return False
-
- result = self.do_task(task)
- if result:
- self.result_queue.put(result)
- self.task_queue.task_done()
-
- return True
-
- def do_task(self, task):
- self.log.info("{0} : checking".format(task.module_name))
-
- # LOAD SOURCE
- module = Module(task.module_name)
- try:
- module.load_source()
- except Exception as error:
- self.log.warning("{0} : error on loading source : {1}, skipping module".format(
- task.module_name,
- error,
- ))
- return None
- else:
- self.log.info("{0} : source successfully loaded".format(task.module_name))
-
- if module.is_disabled_by_default() and not task.explicitly_enabled:
- self.log.info("{0} : disabled by default".format(task.module_name))
- return None
-
- # LOAD CONFIG
- paths = [
- DIRS.modules_user_config,
- DIRS.modules_stock_config,
- ]
-
- conf_abs_path = multi_path_find(
- name='{0}.conf'.format(task.module_name),
- paths=paths,
- )
-
- if conf_abs_path:
- self.log.info("{0} : found config file '{1}'".format(task.module_name, conf_abs_path))
- try:
- module.load_config(conf_abs_path)
- except Exception as error:
- self.log.warning("{0} : error on loading config : {1}, skipping module".format(
- task.module_name, error))
- return None
- else:
- self.log.info("{0} : config was not found in '{1}', using default 1 job config".format(
- task.module_name, paths))
-
- # CHECK JOBS
- jobs = module.create_jobs()
- self.log.info("{0} : created {1} job(s) from the config".format(task.module_name, len(jobs)))
-
- successful_jobs_configs = list()
- for job in jobs:
- if job.autodetection_retry() > 0:
- successful_jobs_configs.append(job.config)
- self.log.info("{0}[{1}]: autodetection job, will be checked in main".format(task.module_name, job.name))
- continue
-
- try:
- job.init()
- except Exception as error:
- self.log.warning("{0}[{1}] : unhandled exception on init : {2}, skipping the job)".format(
- task.module_name, job.name, error))
- continue
-
- try:
- ok = job.check()
- except Exception as error:
- self.log.warning("{0}[{1}] : unhandled exception on check : {2}, skipping the job".format(
- task.module_name, job.name, error))
- continue
-
- if not ok:
- self.log.info("{0}[{1}] : check failed, skipping the job".format(task.module_name, job.name))
- continue
-
- self.log.info("{0}[{1}] : check successful".format(task.module_name, job.name))
-
- job.config['autodetection_retry'] = job.config['update_every']
- successful_jobs_configs.append(job.config)
-
- if not successful_jobs_configs:
- self.log.info("{0} : all jobs failed, skipping module".format(task.module_name))
- return None
-
- return Result(module.source.__name__, successful_jobs_configs)
-
-
-class JobConf(OrderedDict):
- def __init__(self, *args):
- OrderedDict.__init__(self, *args)
-
- def set_defaults_from_module(self, module):
- for k in [k for k in JOB_BASE_CONF if hasattr(module, k)]:
- self[k] = getattr(module, k)
-
- def set_defaults_from_config(self, module_config):
- for k in [k for k in JOB_BASE_CONF if k in module_config]:
- self[k] = module_config[k]
-
- def set_job_name(self, name):
- self['job_name'] = re.sub(r'\s+', '_', name)
-
- def set_override_name(self, name):
- self['override_name'] = re.sub(r'\s+', '_', name)
-
- def as_dict(self):
- return copy.deepcopy(OrderedDict(self))
-
-
-class Job:
- def __init__(
- self,
- service,
- module_name,
- config,
- ):
- self.service = service
- self.config = config
- self.module_name = module_name
- self.name = config['job_name']
- self.override_name = config['override_name']
- self.wrapped = None
-
- def init(self):
- self.wrapped = self.service(configuration=self.config.as_dict())
-
- def check(self):
- return self.wrapped.check()
-
- def post_check(self, min_update_every):
- if self.wrapped.update_every < min_update_every:
- self.wrapped.update_every = min_update_every
-
- def create(self):
- return self.wrapped.create()
-
- def autodetection_retry(self):
- return self.config['autodetection_retry']
-
- def run(self):
- self.wrapped.run()
-
-
-class Module:
- def __init__(self, name):
- self.name = name
- self.source = None
- self.config = dict()
-
- def is_disabled_by_default(self):
- return bool(getattr(self.source, 'disabled_by_default', False))
-
- def load_source(self):
- self.source = load_module(self.name)
-
- def load_config(self, abs_path):
- self.config = load_config(abs_path) or dict()
-
- def gather_jobs_configs(self):
- job_names = [v for v in self.config if isinstance(self.config[v], dict)]
-
- if len(job_names) == 0:
- job_conf = JobConf(JOB_BASE_CONF)
- job_conf.set_defaults_from_module(self.source)
- job_conf.update(self.config)
- job_conf.set_job_name(self.name)
- job_conf.set_override_name(job_conf.pop('name'))
- return [job_conf]
-
- configs = list()
- for job_name in job_names:
- raw_job_conf = self.config[job_name]
- job_conf = JobConf(JOB_BASE_CONF)
- job_conf.set_defaults_from_module(self.source)
- job_conf.set_defaults_from_config(self.config)
- job_conf.update(raw_job_conf)
- job_conf.set_job_name(job_name)
- job_conf.set_override_name(job_conf.pop('name'))
- configs.append(job_conf)
-
- return configs
-
- def create_jobs(self, jobs_conf=None):
- return [Job(self.source.Service, self.name, conf) for conf in jobs_conf or self.gather_jobs_configs()]
-
-
-class JobRunner(threading.Thread):
- def __init__(self, job):
- threading.Thread.__init__(self)
- self.daemon = True
- self.wrapped = job
-
- def run(self):
- self.wrapped.run()
-
-
-class PluginConf(dict):
- def __init__(self, *args):
- dict.__init__(self, *args)
-
- def is_module_enabled(self, module_name, explicit):
- if module_name in self:
- return self[module_name]
- if explicit:
- return False
- return self['default_run']
-
-
-class Plugin:
- def __init__(
- self,
- min_update_every=1,
- modules_to_run=tuple(AVAILABLE_MODULES),
- ):
- self.log = PythonDLogger()
- self.config = PluginConf(PLUGIN_BASE_CONF)
- self.task_queue = multiprocessing.JoinableQueue()
- self.result_queue = multiprocessing.JoinableQueue()
- self.min_update_every = min_update_every
- self.modules_to_run = modules_to_run
- self.auto_detection_jobs = list()
- self.tasks = list()
- self.results = list()
- self.checked_jobs = collections.defaultdict(list)
- self.runs = 0
-
- @staticmethod
- def shutdown():
- safe_print('DISABLE')
- exit(0)
-
- def run(self):
- jobs = self.create_jobs()
- if not jobs:
- return
-
- for job in self.prepare_jobs(jobs):
- self.log.info('{0}[{1}] : started in thread'.format(job.module_name, job.name))
- JobRunner(job).start()
-
- self.serve()
-
- def enqueue_tasks(self):
- for task in self.tasks:
- self.task_queue.put(task)
- self.task_queue.put(END_TASK_MARKER)
-
- def dequeue_results(self):
- while True:
- result = self.result_queue.get()
- self.result_queue.task_done()
- if result is END_TASK_MARKER:
- break
- self.results.append(result)
-
- def load_config(self):
- paths = [
- DIRS.user_config,
- DIRS.stock_config,
- ]
-
- self.log.info("checking for config in {0}".format(paths))
- abs_path = multi_path_find(name=PLUGIN_CONF_FILE, paths=paths)
- if not abs_path:
- self.log.warning('config was not found, using defaults')
- return True
-
- self.log.info("config found, loading config '{0}'".format(abs_path))
- try:
- config = load_config(abs_path) or dict()
- except Exception as error:
- self.log.error('error on loading config : {0}'.format(error))
- return False
-
- self.log.info('config successfully loaded')
- self.config.update(config)
- return True
-
- def setup(self):
- self.log.info('starting setup')
- if not self.load_config():
- return False
-
- if not self.config['enabled']:
- self.log.info('disabled in configuration file')
- return False
-
- for mod in self.modules_to_run:
- if self.config.is_module_enabled(mod, False):
- task = Task(mod, self.config.is_module_enabled(mod, True))
- self.tasks.append(task)
- else:
- self.log.info("{0} : disabled in configuration file".format(mod))
-
- if not self.tasks:
- self.log.info('no modules to run')
- return False
-
- worker = ModuleChecker(self.task_queue, self.result_queue)
- self.log.info('starting checker process ({0} module(s) to check)'.format(len(self.tasks)))
- worker.start()
-
- # TODO: timeouts?
- self.enqueue_tasks()
- self.task_queue.join()
- self.dequeue_results()
- self.result_queue.join()
- self.task_queue.close()
- self.result_queue.close()
- self.log.info('stopping checker process')
- worker.join()
-
- if not self.results:
- self.log.info('no modules to run')
- return False
-
- self.log.info("setup complete, {0} active module(s) : '{1}'".format(
- len(self.results),
- [v.module_name for v in self.results])
- )
-
- return True
-
- def create_jobs(self):
- jobs = list()
- for result in self.results:
- module = Module(result.module_name)
- try:
- module.load_source()
- except Exception as error:
- self.log.warning("{0} : error on loading module source : {1}, skipping module".format(
- result.module_name, error))
- continue
-
- module_jobs = module.create_jobs(result.jobs_configs)
- self.log.info("{0} : created {1} job(s)".format(module.name, len(module_jobs)))
- jobs.extend(module_jobs)
-
- return jobs
-
- def prepare_jobs(self, jobs):
- prepared = list()
-
- for job in jobs:
- check_name = job.override_name or job.name
- if check_name in self.checked_jobs[job.module_name]:
- self.log.info('{0}[{1}] : already served by another job, skipping the job'.format(
- job.module_name, job.name))
- continue
-
- try:
- job.init()
- except Exception as error:
- self.log.warning("{0}[{1}] : unhandled exception on init : {2}, skipping the job".format(
- job.module_name, job.name, error))
- continue
-
- self.log.info("{0}[{1}] : init successful".format(job.module_name, job.name))
-
- try:
- ok = job.check()
- except Exception as error:
- self.log.warning("{0}[{1}] : unhandled exception on check : {2}, skipping the job".format(
- job.module_name, job.name, error))
- continue
-
- if not ok:
- self.log.info('{0}[{1}] : check failed'.format(job.module_name, job.name))
- if job.autodetection_retry() > 0:
- self.log.info('{0}[{1}] : will recheck every {2} second(s)'.format(
- job.module_name, job.name, job.autodetection_retry()))
- self.auto_detection_jobs.append(job)
- continue
-
- self.log.info('{0}[{1}] : check successful'.format(job.module_name, job.name))
-
- job.post_check(int(self.min_update_every))
-
- if not job.create():
- self.log.info('{0}[{1}] : create failed'.format(job.module_name, job.name))
-
- self.checked_jobs[job.module_name].append(check_name)
- prepared.append(job)
-
- return prepared
-
- def serve(self):
- gc_run = self.config['gc_run']
- gc_interval = self.config['gc_interval']
-
- while True:
- self.runs += 1
-
- # threads: main + heartbeat
- if threading.active_count() <= 2 and not self.auto_detection_jobs:
- return
-
- time.sleep(1)
-
- if gc_run and self.runs % gc_interval == 0:
- v = gc.collect()
- self.log.debug('GC collection run result: {0}'.format(v))
-
- self.auto_detection_jobs = [job for job in self.auto_detection_jobs if not self.retry_job(job)]
-
- def retry_job(self, job):
- stop_retrying = True
- retry_later = False
-
- if self.runs % job.autodetection_retry() != 0:
- return retry_later
-
- check_name = job.override_name or job.name
- if check_name in self.checked_jobs[job.module_name]:
- self.log.info("{0}[{1}]: already served by another job, give up on retrying".format(
- job.module_name, job.name))
- return stop_retrying
-
- try:
- ok = job.check()
- except Exception as error:
- self.log.warning("{0}[{1}] : unhandled exception on recheck : {2}, give up on retrying".format(
- job.module_name, job.name, error))
- return stop_retrying
-
- if not ok:
- self.log.info('{0}[{1}] : recheck failed, will retry in {2} second(s)'.format(
- job.module_name, job.name, job.autodetection_retry()))
- return retry_later
- self.log.info('{0}[{1}] : recheck successful'.format(job.module_name, job.name))
-
- if not job.create():
- return stop_retrying
-
- job.post_check(int(self.min_update_every))
- self.checked_jobs[job.module_name].append(check_name)
- JobRunner(job).start()
-
- return stop_retrying
-
-
-def parse_cmd():
- opts = sys.argv[:][1:]
- debug = False
- trace = False
- update_every = 1
- modules_to_run = list()
-
- v = next((opt for opt in opts if opt.isdigit() and int(opt) >= 1), None)
- if v:
- update_every = v
- opts.remove(v)
- if 'debug' in opts:
- debug = True
- opts.remove('debug')
- if 'trace' in opts:
- trace = True
- opts.remove('trace')
- if opts:
- modules_to_run = list(opts)
-
- return collections.namedtuple(
- 'CMD',
- [
- 'update_every',
- 'debug',
- 'trace',
- 'modules_to_run',
- ],
- )(
- update_every,
- debug,
- trace,
- modules_to_run,
- )
-
-
-def main():
- cmd = parse_cmd()
- logger = PythonDLogger()
-
- if cmd.debug:
- logger.logger.severity = 'DEBUG'
- if cmd.trace:
- logger.log_traceback = True
-
- logger.info('using python v{0}'.format(PY_VERSION[0]))
-
- unknown_modules = set(cmd.modules_to_run) - set(AVAILABLE_MODULES)
- if unknown_modules:
- logger.error('unknown modules : {0}'.format(sorted(list(unknown_modules))))
- safe_print('DISABLE')
- return
-
- plugin = Plugin(
- cmd.update_every,
- cmd.modules_to_run or AVAILABLE_MODULES,
- )
-
- HeartBeat(1).start()
-
- if not plugin.setup():
- safe_print('DISABLE')
- return
-
- plugin.run()
- logger.info('exiting from main...')
- plugin.shutdown()
-
-
-if __name__ == '__main__':
- main()
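The removed python.d.plugin above is a build-time rendering of the python.d.plugin.in template (the @configdir_POST@-style placeholders already substituted); after this commit only the template is tracked, and its diff follows. One piece of behaviour shared by the old and new orchestrator is how python.d.conf decides whether a module runs: an explicit per-module entry wins, default_run covers everything else, and modules marked disabled_by_default need an explicit enable. A small sketch of that decision (the standalone function and the sample configuration are illustrative, not the plugin's actual API):

    def is_module_enabled(plugin_conf, module_name, disabled_by_default=False):
        # explicit per-module setting in python.d.conf always wins
        if module_name in plugin_conf:
            return bool(plugin_conf[module_name])
        # modules flagged disabled_by_default run only when explicitly enabled
        if disabled_by_default:
            return False
        # everything else follows the plugin-wide default_run switch
        return bool(plugin_conf.get('default_run', True))

    conf = {'default_run': True, 'apache': False}   # hypothetical python.d.conf content
    print(is_module_enabled(conf, 'apache'))        # False: explicitly disabled
    print(is_module_enabled(conf, 'nginx'))         # True: falls back to default_run
    print(is_module_enabled(conf, 'example', disabled_by_default=True))  # False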
diff --git a/collectors/python.d.plugin/python.d.plugin.in b/collectors/python.d.plugin/python.d.plugin.in
index f1429b6fa..5b8b50a67 100644
--- a/collectors/python.d.plugin/python.d.plugin.in
+++ b/collectors/python.d.plugin/python.d.plugin.in
@@ -1,8 +1,5 @@
#!/usr/bin/env bash
'''':;
-if [[ "$OSTYPE" == "darwin"* ]]; then
- export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
-fi
exec "$(command -v python || command -v python3 || command -v python2 ||
echo "ERROR python IS NOT AVAILABLE IN THIS SYSTEM")" "$0" "$@" # '''
@@ -12,19 +9,25 @@ echo "ERROR python IS NOT AVAILABLE IN THIS SYSTEM")" "$0" "$@" # '''
# Author: Ilya Mashchenko (l2isbad)
# SPDX-License-Identifier: GPL-3.0-or-later
-
import collections
import copy
import gc
-import multiprocessing
+import json
import os
+import pprint
import re
import sys
import time
import threading
import types
-PY_VERSION = sys.version_info[:2]
+try:
+ from queue import Queue
+except ImportError:
+ from Queue import Queue
+
+PY_VERSION = sys.version_info[:2] # (major=3, minor=7, micro=3, releaselevel='final', serial=0)
+
if PY_VERSION > (3, 1):
from importlib.machinery import SourceFileLoader
@@ -35,98 +38,101 @@ else:
ENV_NETDATA_USER_CONFIG_DIR = 'NETDATA_USER_CONFIG_DIR'
ENV_NETDATA_STOCK_CONFIG_DIR = 'NETDATA_STOCK_CONFIG_DIR'
ENV_NETDATA_PLUGINS_DIR = 'NETDATA_PLUGINS_DIR'
+ENV_NETDATA_LIB_DIR = 'NETDATA_LIB_DIR'
ENV_NETDATA_UPDATE_EVERY = 'NETDATA_UPDATE_EVERY'
+def add_pythond_packages():
+ pluginsd = os.getenv(ENV_NETDATA_PLUGINS_DIR, os.path.dirname(__file__))
+ pythond = os.path.abspath(pluginsd + '/../python.d')
+ packages = os.path.join(pythond, 'python_modules')
+ sys.path.append(packages)
+
+
+add_pythond_packages()
+
+
+from bases.collection import safe_print
+from bases.loggers import PythonDLogger
+from bases.loaders import load_config
+
+try:
+ from collections import OrderedDict
+except ImportError:
+ from third_party.ordereddict import OrderedDict
+
+
def dirs():
- user_config = os.getenv(
+ var_lib = os.getenv(
+ ENV_NETDATA_LIB_DIR,
+ '@varlibdir_POST@',
+ )
+ plugin_user_config = os.getenv(
ENV_NETDATA_USER_CONFIG_DIR,
'@configdir_POST@',
)
- stock_config = os.getenv(
+ plugin_stock_config = os.getenv(
ENV_NETDATA_STOCK_CONFIG_DIR,
'@libconfigdir_POST@',
)
- modules_user_config = os.path.join(user_config, 'python.d')
- modules_stock_config = os.path.join(stock_config, 'python.d')
-
- modules = os.path.abspath(
- os.getenv(
- ENV_NETDATA_PLUGINS_DIR,
- os.path.dirname(__file__),
- ) + '/../python.d'
+ pluginsd = os.getenv(
+ ENV_NETDATA_PLUGINS_DIR,
+ os.path.dirname(__file__),
)
- pythond_packages = os.path.join(modules, 'python_modules')
+ modules_user_config = os.path.join(plugin_user_config, 'python.d')
+ modules_stock_config = os.path.join(plugin_stock_config, 'python.d')
+ modules = os.path.abspath(pluginsd + '/../python.d')
- return collections.namedtuple(
+ Dirs = collections.namedtuple(
'Dirs',
[
- 'user_config',
- 'stock_config',
+ 'plugin_user_config',
+ 'plugin_stock_config',
'modules_user_config',
'modules_stock_config',
'modules',
- 'pythond_packages',
+ 'var_lib',
]
- )(
- user_config,
- stock_config,
+ )
+ return Dirs(
+ plugin_user_config,
+ plugin_stock_config,
modules_user_config,
modules_stock_config,
modules,
- pythond_packages,
+ var_lib,
)
DIRS = dirs()
-sys.path.append(DIRS.pythond_packages)
-
-
-from bases.collection import safe_print
-from bases.loggers import PythonDLogger
-from bases.loaders import load_config
-
-try:
- from collections import OrderedDict
-except ImportError:
- from third_party.ordereddict import OrderedDict
-
-
-END_TASK_MARKER = None
-
IS_ATTY = sys.stdout.isatty()
-PLUGIN_CONF_FILE = 'python.d.conf'
-
MODULE_SUFFIX = '.chart.py'
-OBSOLETED_MODULES = (
- 'apache_cache', # replaced by web_log
- 'cpuidle', # rewritten in C
- 'cpufreq', # rewritten in C
- 'gunicorn_log', # replaced by web_log
- 'linux_power_supply', # rewritten in C
- 'nginx_log', # replaced by web_log
- 'mdstat', # rewritten in C
- 'sslcheck', # memory leak bug https://github.com/netdata/netdata/issues/5624
-)
+def available_modules():
+ obsolete = (
+ 'apache_cache', # replaced by web_log
+ 'cpuidle', # rewritten in C
+ 'cpufreq', # rewritten in C
+ 'gunicorn_log', # replaced by web_log
+ 'linux_power_supply', # rewritten in C
+ 'nginx_log', # replaced by web_log
+ 'mdstat', # rewritten in C
+ 'sslcheck', # rewritten in Go, memory leak bug https://github.com/netdata/netdata/issues/5624
+ )
-AVAILABLE_MODULES = [
- m[:-len(MODULE_SUFFIX)] for m in sorted(os.listdir(DIRS.modules))
- if m.endswith(MODULE_SUFFIX) and m[:-len(MODULE_SUFFIX)] not in OBSOLETED_MODULES
-]
+ files = sorted(os.listdir(DIRS.modules))
+ modules = [m[:-len(MODULE_SUFFIX)] for m in files if m.endswith(MODULE_SUFFIX)]
+ avail = [m for m in modules if m not in obsolete]
+ return tuple(avail)
-PLUGIN_BASE_CONF = {
- 'enabled': True,
- 'default_run': True,
- 'gc_run': True,
- 'gc_interval': 300,
-}
+
+AVAILABLE_MODULES = available_modules()
JOB_BASE_CONF = {
- 'update_every': os.getenv(ENV_NETDATA_UPDATE_EVERY, 1),
+ 'update_every': int(os.getenv(ENV_NETDATA_UPDATE_EVERY, 1)),
'priority': 60000,
'autodetection_retry': 0,
'chart_cleanup': 10,
@@ -134,23 +140,20 @@ JOB_BASE_CONF = {
'name': str(),
}
+PLUGIN_BASE_CONF = {
+ 'enabled': True,
+ 'default_run': True,
+ 'gc_run': True,
+ 'gc_interval': 300,
+}
-def heartbeat():
- if IS_ATTY:
- return
- safe_print('\n')
-
-
-class HeartBeat(threading.Thread):
- def __init__(self, every):
- threading.Thread.__init__(self)
- self.daemon = True
- self.every = every
- def run(self):
- while True:
- time.sleep(self.every)
- heartbeat()
+def multi_path_find(name, *paths):
+ for path in paths:
+ abs_name = os.path.join(path, name)
+ if os.path.isfile(abs_name):
+ return abs_name
+ return str()
def load_module(name):
@@ -161,265 +164,268 @@ def load_module(name):
return module.load_module()
-def multi_path_find(name, paths):
- for path in paths:
- abs_name = os.path.join(path, name)
- if os.path.isfile(abs_name):
- return abs_name
- return ''
-
-
-Task = collections.namedtuple(
- 'Task',
- [
- 'module_name',
- 'explicitly_enabled',
- ],
-)
-
-Result = collections.namedtuple(
- 'Result',
- [
- 'module_name',
- 'jobs_configs',
- ],
-)
-
-
-class ModuleChecker(multiprocessing.Process):
- def __init__(
- self,
- task_queue,
- result_queue,
- ):
- multiprocessing.Process.__init__(self)
- self.log = PythonDLogger()
- self.log.job_name = 'checker'
- self.task_queue = task_queue
- self.result_queue = result_queue
-
- def run(self):
- self.log.info('starting...')
- HeartBeat(1).start()
- while self.run_once():
- pass
- self.log.info('terminating...')
+class ModuleConfig:
+ def __init__(self, name, config=None):
+ self.name = name
+ self.config = config or OrderedDict()
- def run_once(self):
- task = self.task_queue.get()
+ def load(self, abs_path):
+ self.config.update(load_config(abs_path) or dict())
- if task is END_TASK_MARKER:
- # TODO: find better solution, understand why heartbeat thread doesn't work
- heartbeat()
- self.task_queue.task_done()
- self.result_queue.put(END_TASK_MARKER)
- return False
+ def defaults(self):
+ keys = (
+ 'update_every',
+ 'priority',
+ 'autodetection_retry',
+ 'chart_cleanup',
+ 'penalty',
+ )
+ return dict((k, self.config[k]) for k in keys if k in self.config)
- result = self.do_task(task)
- if result:
- self.result_queue.put(result)
- self.task_queue.task_done()
+ def create_job(self, job_name, job_config=None):
+ job_config = job_config or dict()
- return True
+ config = OrderedDict()
+ config.update(job_config)
+ config['job_name'] = job_name
+ for k, v in self.defaults().items():
+ config.setdefault(k, v)
- def do_task(self, task):
- self.log.info("{0} : checking".format(task.module_name))
+ return config
- # LOAD SOURCE
- module = Module(task.module_name)
- try:
- module.load_source()
- except Exception as error:
- self.log.warning("{0} : error on loading source : {1}, skipping module".format(
- task.module_name,
- error,
- ))
- return None
- else:
- self.log.info("{0} : source successfully loaded".format(task.module_name))
+ def job_names(self):
+ return [v for v in self.config if isinstance(self.config.get(v), dict)]
- if module.is_disabled_by_default() and not task.explicitly_enabled:
- self.log.info("{0} : disabled by default".format(task.module_name))
- return None
+ def single_job(self):
+ return [self.create_job(self.name)]
- # LOAD CONFIG
- paths = [
- DIRS.modules_user_config,
- DIRS.modules_stock_config,
- ]
+ def multi_job(self):
+ return [self.create_job(n, self.config[n]) for n in self.job_names()]
- conf_abs_path = multi_path_find(
- name='{0}.conf'.format(task.module_name),
- paths=paths,
- )
+ def create_jobs(self):
+ return self.multi_job() or self.single_job()
- if conf_abs_path:
- self.log.info("{0} : found config file '{1}'".format(task.module_name, conf_abs_path))
- try:
- module.load_config(conf_abs_path)
- except Exception as error:
- self.log.warning("{0} : error on loading config : {1}, skipping module".format(
- task.module_name, error))
- return None
- else:
- self.log.info("{0} : config was not found in '{1}', using default 1 job config".format(
- task.module_name, paths))
-
- # CHECK JOBS
- jobs = module.create_jobs()
- self.log.info("{0} : created {1} job(s) from the config".format(task.module_name, len(jobs)))
-
- successful_jobs_configs = list()
- for job in jobs:
- if job.autodetection_retry() > 0:
- successful_jobs_configs.append(job.config)
- self.log.info("{0}[{1}]: autodetection job, will be checked in main".format(task.module_name, job.name))
- continue
- try:
- job.init()
- except Exception as error:
- self.log.warning("{0}[{1}] : unhandled exception on init : {2}, skipping the job)".format(
- task.module_name, job.name, error))
- continue
+class JobsConfigsBuilder:
+ def __init__(self, config_dirs):
+ self.config_dirs = config_dirs
+ self.log = PythonDLogger()
+ self.job_defaults = None
+ self.module_defaults = None
+ self.min_update_every = None
- try:
- ok = job.check()
- except Exception as error:
- self.log.warning("{0}[{1}] : unhandled exception on check : {2}, skipping the job".format(
- task.module_name, job.name, error))
- continue
+ def load_module_config(self, module_name):
+ name = '{0}.conf'.format(module_name)
+ self.log.debug("[{0}] looking for '{1}' in {2}".format(module_name, name, self.config_dirs))
+ config = ModuleConfig(module_name)
- if not ok:
- self.log.info("{0}[{1}] : check failed, skipping the job".format(task.module_name, job.name))
- continue
+ abs_path = multi_path_find(name, *self.config_dirs)
+ if not abs_path:
+ self.log.warning("[{0}] '{1}' was not found".format(module_name, name))
+ return config
- self.log.info("{0}[{1}] : check successful".format(task.module_name, job.name))
+ self.log.debug("[{0}] loading '{1}'".format(module_name, abs_path))
+ try:
+ config.load(abs_path)
+ except Exception as error:
+ self.log.error("[{0}] error on loading '{1}' : {2}".format(module_name, abs_path, repr(error)))
+ return None
- job.config['autodetection_retry'] = job.config['update_every']
- successful_jobs_configs.append(job.config)
+ self.log.debug("[{0}] '{1}' is loaded".format(module_name, abs_path))
+ return config
- if not successful_jobs_configs:
- self.log.info("{0} : all jobs failed, skipping module".format(task.module_name))
- return None
+ @staticmethod
+ def apply_defaults(jobs, defaults):
+ if defaults is None:
+ return
+ for k, v in defaults.items():
+ for job in jobs:
+ job.setdefault(k, v)
- return Result(module.source.__name__, successful_jobs_configs)
+ def set_min_update_every(self, jobs, min_update_every):
+ if min_update_every is None:
+ return
+ for job in jobs:
+ if 'update_every' in job and job['update_every'] < self.min_update_every:
+ job['update_every'] = self.min_update_every
+ def build(self, module_name):
+ config = self.load_module_config(module_name)
+ if config is None:
+ return None
-class JobConf(OrderedDict):
- def __init__(self, *args):
- OrderedDict.__init__(self, *args)
+ configs = config.create_jobs()
+ self.log.info("[{0}] built {1} job(s) configs".format(module_name, len(configs)))
- def set_defaults_from_module(self, module):
- for k in [k for k in JOB_BASE_CONF if hasattr(module, k)]:
- self[k] = getattr(module, k)
+ self.apply_defaults(configs, self.module_defaults)
+ self.apply_defaults(configs, self.job_defaults)
+ self.set_min_update_every(configs, self.min_update_every)
- def set_defaults_from_config(self, module_config):
- for k in [k for k in JOB_BASE_CONF if k in module_config]:
- self[k] = module_config[k]
+ return configs
- def set_job_name(self, name):
- self['job_name'] = re.sub(r'\s+', '_', name)
- def set_override_name(self, name):
- self['override_name'] = re.sub(r'\s+', '_', name)
+JOB_STATUS_ACTIVE = 'active'
+JOB_STATUS_RECOVERING = 'recovering'
+JOB_STATUS_DROPPED = 'dropped'
+JOB_STATUS_INIT = 'initial'
- def as_dict(self):
- return copy.deepcopy(OrderedDict(self))
+class Job(threading.Thread):
+ inf = -1
-class Job:
- def __init__(
- self,
- service,
- module_name,
- config,
- ):
+ def __init__(self, service, module_name, config):
+ threading.Thread.__init__(self)
+ self.daemon = True
self.service = service
- self.config = config
self.module_name = module_name
- self.name = config['job_name']
- self.override_name = config['override_name']
- self.wrapped = None
+ self.config = config
+ self.real_name = config['job_name']
+ self.actual_name = config['override_name'] or self.real_name
+ self.autodetection_retry = config['autodetection_retry']
+ self.checks = self.inf
+ self.job = None
+ self.status = JOB_STATUS_INIT
+
+ def is_inited(self):
+ return self.job is not None
def init(self):
- self.wrapped = self.service(configuration=self.config.as_dict())
+ self.job = self.service(configuration=copy.deepcopy(self.config))
def check(self):
- return self.wrapped.check()
-
- def post_check(self, min_update_every):
- if self.wrapped.update_every < min_update_every:
- self.wrapped.update_every = min_update_every
+ ok = self.job.check()
+ self.checks -= self.checks != self.inf and not ok
+ return ok
def create(self):
- return self.wrapped.create()
+ self.job.create()
- def autodetection_retry(self):
- return self.config['autodetection_retry']
+ def need_to_recheck(self):
+ return self.autodetection_retry != 0 and self.checks != 0
def run(self):
- self.wrapped.run()
+ self.job.run()
-class Module:
+class ModuleSrc:
def __init__(self, name):
self.name = name
- self.source = None
- self.config = dict()
+ self.src = None
+
+ def load(self):
+ self.src = load_module(self.name)
+
+ def get(self, key):
+ return getattr(self.src, key, None)
+
+ def service(self):
+ return self.get('Service')
+
+ def defaults(self):
+ keys = (
+ 'update_every',
+ 'priority',
+ 'autodetection_retry',
+ 'chart_cleanup',
+ 'penalty',
+ )
+ return dict((k, self.get(k)) for k in keys if self.get(k) is not None)
def is_disabled_by_default(self):
- return bool(getattr(self.source, 'disabled_by_default', False))
-
- def load_source(self):
- self.source = load_module(self.name)
-
- def load_config(self, abs_path):
- self.config = load_config(abs_path) or dict()
-
- def gather_jobs_configs(self):
- job_names = [v for v in self.config if isinstance(self.config[v], dict)]
-
- if len(job_names) == 0:
- job_conf = JobConf(JOB_BASE_CONF)
- job_conf.set_defaults_from_module(self.source)
- job_conf.update(self.config)
- job_conf.set_job_name(self.name)
- job_conf.set_override_name(job_conf.pop('name'))
- return [job_conf]
-
- configs = list()
- for job_name in job_names:
- raw_job_conf = self.config[job_name]
- job_conf = JobConf(JOB_BASE_CONF)
- job_conf.set_defaults_from_module(self.source)
- job_conf.set_defaults_from_config(self.config)
- job_conf.update(raw_job_conf)
- job_conf.set_job_name(job_name)
- job_conf.set_override_name(job_conf.pop('name'))
- configs.append(job_conf)
+ return bool(self.get('disabled_by_default'))
- return configs
- def create_jobs(self, jobs_conf=None):
- return [Job(self.source.Service, self.name, conf) for conf in jobs_conf or self.gather_jobs_configs()]
+class JobsStatuses:
+ def __init__(self):
+ self.items = OrderedDict()
+ def dump(self):
+ return json.dumps(self.items, indent=2)
-class JobRunner(threading.Thread):
- def __init__(self, job):
- threading.Thread.__init__(self)
- self.daemon = True
- self.wrapped = job
+ def get(self, module_name, job_name):
+ if module_name not in self.items:
+ return None
+ return self.items[module_name].get(job_name)
- def run(self):
- self.wrapped.run()
+ def has(self, module_name, job_name):
+ return self.get(module_name, job_name) is not None
+ def from_file(self, path):
+ with open(path) as f:
+ data = json.load(f)
+ return self.from_json(data)
-class PluginConf(dict):
+ @staticmethod
+ def from_json(items):
+ if not isinstance(items, dict):
+ raise Exception('items obj has wrong type : {0}'.format(type(items)))
+ if not items:
+ return JobsStatuses()
+
+ v = OrderedDict()
+ for mod_name in sorted(items):
+ if not items[mod_name]:
+ continue
+ v[mod_name] = OrderedDict()
+ for job_name in sorted(items[mod_name]):
+ v[mod_name][job_name] = items[mod_name][job_name]
+
+ rv = JobsStatuses()
+ rv.items = v
+ return rv
+
+ @staticmethod
+ def from_jobs(jobs):
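+        # only jobs that matter on the next plugin run (active or recovering ones)
+        # are persisted; initial and dropped jobs are left out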
+ v = OrderedDict()
+ for job in jobs:
+ status = job.status
+ if status not in (JOB_STATUS_ACTIVE, JOB_STATUS_RECOVERING):
+ continue
+ if job.module_name not in v:
+ v[job.module_name] = OrderedDict()
+ v[job.module_name][job.real_name] = status
+
+ rv = JobsStatuses()
+ rv.items = v
+ return rv
+
+
+class StdoutSaver:
+ @staticmethod
+ def save(dump):
+ print(dump)
+
+
+class CachedFileSaver:
+ def __init__(self, path):
+ self.last_save_success = False
+ self.last_saved_dump = str()
+ self.path = path
+
+ def save(self, dump):
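+        # skip the write when the exact same dump was already saved successfully, so
+        # an unchanged statuses file is not rewritten on every save cycle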
+ if self.last_save_success and self.last_saved_dump == dump:
+ return
+ try:
+ with open(self.path, 'w') as out:
+ out.write(dump)
+ except Exception:
+ self.last_save_success = False
+ raise
+ self.last_saved_dump = dump
+ self.last_save_success = True
+
+
+class PluginConfig(dict):
def __init__(self, *args):
dict.__init__(self, *args)
- def is_module_enabled(self, module_name, explicit):
+ def is_module_explicitly_enabled(self, module_name):
+ return self._is_module_enabled(module_name, True)
+
+ def is_module_enabled(self, module_name):
+ return self._is_module_enabled(module_name, False)
+
+ def _is_module_enabled(self, module_name, explicit):
if module_name in self:
return self[module_name]
if explicit:
@@ -428,249 +434,253 @@ class PluginConf(dict):
class Plugin:
- def __init__(
- self,
- min_update_every=1,
- modules_to_run=tuple(AVAILABLE_MODULES),
- ):
- self.log = PythonDLogger()
- self.config = PluginConf(PLUGIN_BASE_CONF)
- self.task_queue = multiprocessing.JoinableQueue()
- self.result_queue = multiprocessing.JoinableQueue()
- self.min_update_every = min_update_every
+ config_name = 'python.d.conf'
+ jobs_status_dump_name = 'pythond-jobs-statuses.json'
+
+ def __init__(self, modules_to_run, min_update_every):
self.modules_to_run = modules_to_run
- self.auto_detection_jobs = list()
- self.tasks = list()
- self.results = list()
- self.checked_jobs = collections.defaultdict(list)
+ self.min_update_every = min_update_every
+ self.config = PluginConfig(PLUGIN_BASE_CONF)
+ self.log = PythonDLogger()
+ self.started_jobs = collections.defaultdict(dict)
+ self.jobs = list()
+ self.saver = None
self.runs = 0
- @staticmethod
- def shutdown():
- safe_print('DISABLE')
- exit(0)
-
- def run(self):
- jobs = self.create_jobs()
- if not jobs:
- return
-
- for job in self.prepare_jobs(jobs):
- self.log.info('{0}[{1}] : started in thread'.format(job.module_name, job.name))
- JobRunner(job).start()
-
- self.serve()
-
- def enqueue_tasks(self):
- for task in self.tasks:
- self.task_queue.put(task)
- self.task_queue.put(END_TASK_MARKER)
-
- def dequeue_results(self):
- while True:
- result = self.result_queue.get()
- self.result_queue.task_done()
- if result is END_TASK_MARKER:
- break
- self.results.append(result)
-
def load_config(self):
paths = [
- DIRS.user_config,
- DIRS.stock_config,
+ DIRS.plugin_user_config,
+ DIRS.plugin_stock_config,
]
-
- self.log.info("checking for config in {0}".format(paths))
- abs_path = multi_path_find(name=PLUGIN_CONF_FILE, paths=paths)
+ self.log.debug("looking for '{0}' in {1}".format(self.config_name, paths))
+ abs_path = multi_path_find(self.config_name, *paths)
if not abs_path:
- self.log.warning('config was not found, using defaults')
+ self.log.warning("'{0}' was not found, using defaults".format(self.config_name))
return True
- self.log.info("config found, loading config '{0}'".format(abs_path))
+ self.log.debug("loading '{0}'".format(abs_path))
try:
- config = load_config(abs_path) or dict()
+ config = load_config(abs_path)
except Exception as error:
- self.log.error('error on loading config : {0}'.format(error))
+ self.log.error("error on loading '{0}' : {1}".format(abs_path, repr(error)))
return False
- self.log.info('config successfully loaded')
+ self.log.debug("'{0}' is loaded".format(abs_path))
self.config.update(config)
return True
- def setup(self):
- self.log.info('starting setup')
- if not self.load_config():
- return False
-
- if not self.config['enabled']:
- self.log.info('disabled in configuration file')
- return False
-
- for mod in self.modules_to_run:
- if self.config.is_module_enabled(mod, False):
- task = Task(mod, self.config.is_module_enabled(mod, True))
- self.tasks.append(task)
- else:
- self.log.info("{0} : disabled in configuration file".format(mod))
-
- if not self.tasks:
- self.log.info('no modules to run')
- return False
+ def load_job_statuses(self):
+ self.log.debug("looking for '{0}' in {1}".format(self.jobs_status_dump_name, DIRS.var_lib))
+ abs_path = multi_path_find(self.jobs_status_dump_name, DIRS.var_lib)
+ if not abs_path:
+ self.log.warning("'{0}' was not found".format(self.jobs_status_dump_name))
+ return
- worker = ModuleChecker(self.task_queue, self.result_queue)
- self.log.info('starting checker process ({0} module(s) to check)'.format(len(self.tasks)))
- worker.start()
-
- # TODO: timeouts?
- self.enqueue_tasks()
- self.task_queue.join()
- self.dequeue_results()
- self.result_queue.join()
- self.task_queue.close()
- self.result_queue.close()
- self.log.info('stopping checker process')
- worker.join()
-
- if not self.results:
- self.log.info('no modules to run')
- return False
+ self.log.debug("loading '{0}'".format(abs_path))
+ try:
+ statuses = JobsStatuses().from_file(abs_path)
+ except Exception as error:
+ self.log.warning("error on loading '{0}' : {1}".format(abs_path, repr(error)))
+ return None
+ self.log.debug("'{0}' is loaded".format(abs_path))
+ return statuses
- self.log.info("setup complete, {0} active module(s) : '{1}'".format(
- len(self.results),
- [v.module_name for v in self.results])
- )
+ def create_jobs(self, job_statuses=None):
+ paths = [
+ DIRS.modules_user_config,
+ DIRS.modules_stock_config,
+ ]
- return True
+ builder = JobsConfigsBuilder(paths)
+ builder.job_defaults = JOB_BASE_CONF
+ builder.min_update_every = self.min_update_every
- def create_jobs(self):
jobs = list()
- for result in self.results:
- module = Module(result.module_name)
+ for mod_name in self.modules_to_run:
+ if not self.config.is_module_enabled(mod_name):
+ self.log.info("[{0}] is disabled in the configuration file, skipping it".format(mod_name))
+ continue
+
+ src = ModuleSrc(mod_name)
try:
- module.load_source()
+ src.load()
except Exception as error:
- self.log.warning("{0} : error on loading module source : {1}, skipping module".format(
- result.module_name, error))
+ self.log.warning("[{0}] error on loading source : {1}, skipping it".format(mod_name, repr(error)))
continue
- module_jobs = module.create_jobs(result.jobs_configs)
- self.log.info("{0} : created {1} job(s)".format(module.name, len(module_jobs)))
- jobs.extend(module_jobs)
-
- return jobs
-
- def prepare_jobs(self, jobs):
- prepared = list()
+ if not (src.service() and callable(src.service())):
+ self.log.warning("[{0}] has no callable Service object, skipping it".format(mod_name))
+ continue
- for job in jobs:
- check_name = job.override_name or job.name
- if check_name in self.checked_jobs[job.module_name]:
- self.log.info('{0}[{1}] : already served by another job, skipping the job'.format(
- job.module_name, job.name))
+ if src.is_disabled_by_default() and not self.config.is_module_explicitly_enabled(mod_name):
+ self.log.info("[{0}] is disabled by default, skipping it".format(mod_name))
continue
- try:
- job.init()
- except Exception as error:
- self.log.warning("{0}[{1}] : unhandled exception on init : {2}, skipping the job".format(
- job.module_name, job.name, error))
+ builder.module_defaults = src.defaults()
+ configs = builder.build(mod_name)
+ if not configs:
+ self.log.info("[{0}] has no job configs, skipping it".format(mod_name))
continue
- self.log.info("{0}[{1}] : init successful".format(job.module_name, job.name))
+ for config in configs:
+ config['job_name'] = re.sub(r'\s+', '_', config['job_name'])
+ config['override_name'] = re.sub(r'\s+', '_', config.pop('name'))
- try:
- ok = job.check()
- except Exception as error:
- self.log.warning("{0}[{1}] : unhandled exception on check : {2}, skipping the job".format(
- job.module_name, job.name, error))
- continue
+ job = Job(src.service(), mod_name, config)
- if not ok:
- self.log.info('{0}[{1}] : check failed'.format(job.module_name, job.name))
- if job.autodetection_retry() > 0:
- self.log.info('{0}[{1}] : will recheck every {2} second(s)'.format(
- job.module_name, job.name, job.autodetection_retry()))
- self.auto_detection_jobs.append(job)
- continue
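+                # a job that was active on the previous plugin run but has autodetection
+                # retry disabled gets a temporary recheck budget (11 attempts, roughly one
+                # every 30 seconds), so a briefly unavailable service is not dropped for good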
+                was_previously_active = job_statuses and job_statuses.has(job.module_name, job.real_name)
+                if was_previously_active and job.autodetection_retry == 0:
+                    self.log.debug('{0}[{1}] was previously active, applying recovering settings'.format(
+                        job.module_name, job.real_name))
+                    job.checks = 11
+                    job.autodetection_retry = 30
-        self.log.info('{0}[{1}] : check successful'.format(job.module_name, job.name))
+                jobs.append(job)
- job.post_check(int(self.min_update_every))
+ return jobs
- if not job.create():
- self.log.info('{0}[{1}] : create failed'.format(job.module_name, job.name))
+ def setup(self):
+ if not self.load_config():
+ return False
- self.checked_jobs[job.module_name].append(check_name)
- prepared.append(job)
+ if not self.config['enabled']:
+ self.log.info('disabled in the configuration file')
+ return False
- return prepared
+ statuses = self.load_job_statuses()
- def serve(self):
- gc_run = self.config['gc_run']
- gc_interval = self.config['gc_interval']
+ self.jobs = self.create_jobs(statuses)
+ if not self.jobs:
+ self.log.info('no jobs to run')
+ return False
+
+ if not IS_ATTY:
+ abs_path = os.path.join(DIRS.var_lib, self.jobs_status_dump_name)
+ self.saver = CachedFileSaver(abs_path)
+ return True
- while True:
- self.runs += 1
+ def start_jobs(self, *jobs):
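+        # every startable job goes through init -> check -> create before its thread
+        # is started; a failure at any step drops the job or marks it as recovering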
+ for job in jobs:
+ if job.status not in (JOB_STATUS_INIT, JOB_STATUS_RECOVERING):
+ continue
- # threads: main + heartbeat
- if threading.active_count() <= 2 and not self.auto_detection_jobs:
- return
+ if job.actual_name in self.started_jobs[job.module_name]:
+ self.log.info('{0}[{1}] : already served by another job, skipping it'.format(
+ job.module_name, job.real_name))
+ job.status = JOB_STATUS_DROPPED
+ continue
- time.sleep(1)
+ if not job.is_inited():
+ try:
+ job.init()
+ except Exception as error:
+                    self.log.warning("{0}[{1}] : unhandled exception on init : {2}, skipping the job".format(
+                        job.module_name, job.real_name, repr(error)))
+ job.status = JOB_STATUS_DROPPED
+ continue
- if gc_run and self.runs % gc_interval == 0:
- v = gc.collect()
- self.log.debug('GC collection run result: {0}'.format(v))
+ try:
+ ok = job.check()
+ except Exception as error:
+                self.log.warning("{0}[{1}] : unhandled exception on check : {2}, skipping the job".format(
+                    job.module_name, job.real_name, repr(error)))
+ job.status = JOB_STATUS_DROPPED
+ continue
+ if not ok:
+ self.log.info('{0}[{1}] : check failed'.format(job.module_name, job.real_name))
+ job.status = JOB_STATUS_RECOVERING if job.need_to_recheck() else JOB_STATUS_DROPPED
+ continue
+ self.log.info('{0}[{1}] : check success'.format(job.module_name, job.real_name))
+
+ try:
+ job.create()
+ except Exception as error:
+                self.log.error("{0}[{1}] : unhandled exception on create : {2}, skipping the job".format(
+                    job.module_name, job.real_name, repr(error)))
+ job.status = JOB_STATUS_DROPPED
+ continue
- self.auto_detection_jobs = [job for job in self.auto_detection_jobs if not self.retry_job(job)]
+            self.started_jobs[job.module_name][job.actual_name] = True
+ job.status = JOB_STATUS_ACTIVE
+ job.start()
- def retry_job(self, job):
- stop_retrying = True
- retry_later = False
+ @staticmethod
+ def keep_alive():
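+        # when spawned by the netdata daemon (not a tty), print an empty line every
+        # cycle so the daemon sees the plugin is still alive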
+ if not IS_ATTY:
+ safe_print('\n')
+
+ def garbage_collection(self):
+ if self.config['gc_run'] and self.runs % self.config['gc_interval'] == 0:
+ v = gc.collect()
+ self.log.debug('GC collection run result: {0}'.format(v))
+
+ def restart_recovering_jobs(self):
+ for job in self.jobs:
+ if job.status != JOB_STATUS_RECOVERING:
+ continue
+ if self.runs % job.autodetection_retry != 0:
+ continue
+ self.start_jobs(job)
- if self.runs % job.autodetection_retry() != 0:
- return retry_later
+ def cleanup_jobs(self):
+ self.jobs = [j for j in self.jobs if j.status != JOB_STATUS_DROPPED]
- check_name = job.override_name or job.name
- if check_name in self.checked_jobs[job.module_name]:
- self.log.info("{0}[{1}]: already served by another job, give up on retrying".format(
- job.module_name, job.name))
- return stop_retrying
+ def have_alive_jobs(self):
+        return any(
+            job.status in (JOB_STATUS_RECOVERING, JOB_STATUS_ACTIVE) for job in self.jobs
+        )
+ def save_job_statuses(self):
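+        # persist statuses only when a file saver was set up (non-tty run), and only
+        # every 10th iteration to keep disk writes infrequent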
+ if self.saver is None:
+ return
+ if self.runs % 10 != 0:
+ return
+ dump = JobsStatuses().from_jobs(self.jobs).dump()
try:
- ok = job.check()
+ self.saver.save(dump)
except Exception as error:
- self.log.warning("{0}[{1}] : unhandled exception on recheck : {2}, give up on retrying".format(
- job.module_name, job.name, error))
- return stop_retrying
+ self.log.error("error on saving jobs statuses dump : {0}".format(repr(error)))
- if not ok:
- self.log.info('{0}[{1}] : recheck failed, will retry in {2} second(s)'.format(
- job.module_name, job.name, job.autodetection_retry()))
- return retry_later
- self.log.info('{0}[{1}] : recheck successful'.format(job.module_name, job.name))
+ def serve_once(self):
+ if not self.have_alive_jobs():
+ self.log.info('no jobs to serve')
+ return False
+
+ time.sleep(1)
+ self.runs += 1
- if not job.create():
- return stop_retrying
+ self.keep_alive()
+ self.garbage_collection()
+ self.cleanup_jobs()
+ self.restart_recovering_jobs()
+ self.save_job_statuses()
+ return True
- job.post_check(int(self.min_update_every))
- self.checked_jobs[job.module_name].append(check_name)
- JobRunner(job).start()
+ def serve(self):
+ while self.serve_once():
+ pass
- return stop_retrying
+ def run(self):
+ self.start_jobs(*self.jobs)
+ self.serve()
-def parse_cmd():
+def parse_command_line():
opts = sys.argv[:][1:]
+
debug = False
trace = False
update_every = 1
modules_to_run = list()
- v = next((opt for opt in opts if opt.isdigit() and int(opt) >= 1), None)
- if v:
- update_every = v
- opts.remove(v)
+ def find_first_positive_int(values):
+ return next((v for v in values if v.isdigit() and int(v) >= 1), None)
+
+ u = find_first_positive_int(opts)
+ if u is not None:
+ update_every = int(u)
+ opts.remove(u)
if 'debug' in opts:
debug = True
opts.remove('debug')
@@ -680,54 +690,80 @@ def parse_cmd():
if opts:
modules_to_run = list(opts)
- return collections.namedtuple(
+ cmd = collections.namedtuple(
'CMD',
[
'update_every',
'debug',
'trace',
'modules_to_run',
- ],
- )(
+ ])
+ return cmd(
update_every,
debug,
trace,
- modules_to_run,
+ modules_to_run
)
+def guess_module(modules, *names):
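+    # suggest, for every unknown name, the modules matching its longest prefix that
+    # still matches something (a hint for likely typos on the command line)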
+ def guess(n):
+ found = None
+ for i, _ in enumerate(n):
+            cur = [x for x in modules if x.startswith(n[:i + 1])]
+ if not cur:
+ return found
+ found = cur
+ return found
+
+ guessed = list()
+ for name in names:
+ name = name.lower()
+ m = guess(name)
+ if m:
+ guessed.extend(m)
+ return sorted(set(guessed))
+
+
+def disable():
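+    # tell the netdata daemon to disable the plugin; only meaningful when the plugin
+    # was spawned by the daemon rather than run from a terminal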
+ if not IS_ATTY:
+ safe_print('DISABLE')
+ exit(0)
+
+
def main():
- cmd = parse_cmd()
- logger = PythonDLogger()
+ cmd = parse_command_line()
+ log = PythonDLogger()
if cmd.debug:
- logger.logger.severity = 'DEBUG'
+ log.logger.severity = 'DEBUG'
if cmd.trace:
- logger.log_traceback = True
+ log.log_traceback = True
- logger.info('using python v{0}'.format(PY_VERSION[0]))
+ log.info('using python v{0}'.format(PY_VERSION[0]))
- unknown_modules = set(cmd.modules_to_run) - set(AVAILABLE_MODULES)
- if unknown_modules:
- logger.error('unknown modules : {0}'.format(sorted(list(unknown_modules))))
- safe_print('DISABLE')
+ unknown = set(cmd.modules_to_run) - set(AVAILABLE_MODULES)
+ if unknown:
+ log.error('unknown modules : {0}'.format(sorted(list(unknown))))
+ guessed = guess_module(AVAILABLE_MODULES, *cmd.modules_to_run)
+ if guessed:
+            log.info('you probably meant :\n{0}'.format(pprint.pformat(guessed, width=1)))
return
- plugin = Plugin(
- cmd.update_every,
+ p = Plugin(
cmd.modules_to_run or AVAILABLE_MODULES,
+ cmd.update_every,
)
- HeartBeat(1).start()
-
- if not plugin.setup():
- safe_print('DISABLE')
- return
-
- plugin.run()
- logger.info('exiting from main...')
- plugin.shutdown()
+ try:
+ if not p.setup():
+ return
+ p.run()
+ except KeyboardInterrupt:
+ pass
+ log.info('exiting from main...')
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
+ disable()