Diffstat:
 collectors/python.d.plugin/dns_query_time/Makefile.inc            |  13 +
 collectors/python.d.plugin/dns_query_time/README.md               |  29 +
 collectors/python.d.plugin/dns_query_time/dns_query_time.chart.py | 149 +
 collectors/python.d.plugin/dns_query_time/dns_query_time.conf     |  69 +
 4 files changed, 260 insertions(+), 0 deletions(-)
diff --git a/collectors/python.d.plugin/dns_query_time/Makefile.inc b/collectors/python.d.plugin/dns_query_time/Makefile.inc
new file mode 100644
index 0000000..7eca3e0
--- /dev/null
+++ b/collectors/python.d.plugin/dns_query_time/Makefile.inc
@@ -0,0 +1,13 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# THIS IS NOT A COMPLETE Makefile
+# IT IS INCLUDED BY ITS PARENT'S Makefile.am
+# IT IS REQUIRED TO REFERENCE ALL FILES RELATIVE TO THE PARENT
+
+# install these files
+dist_python_DATA += dns_query_time/dns_query_time.chart.py
+dist_pythonconfig_DATA += dns_query_time/dns_query_time.conf
+
+# do not install these files, but include them in the distribution
+dist_noinst_DATA += dns_query_time/README.md dns_query_time/Makefile.inc
+
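For context on the include mechanism the header comments describe: a minimal sketch of how the parent `collectors/python.d.plugin/Makefile.am` might consume this fragment. The variable initializations shown are assumptions for illustration, not part of this diff:

```makefile
# Sketch of the parent Makefile.am (assumed layout, not shown in this diff).
# The parent initializes the install lists, then each module's Makefile.inc
# appends its own files with '+=', using paths relative to the parent.
dist_python_DATA =
dist_pythonconfig_DATA =
dist_noinst_DATA =

include dns_query_time/Makefile.inc
```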
diff --git a/collectors/python.d.plugin/dns_query_time/README.md b/collectors/python.d.plugin/dns_query_time/README.md
new file mode 100644
index 0000000..e1fde74
--- /dev/null
+++ b/collectors/python.d.plugin/dns_query_time/README.md
@@ -0,0 +1,29 @@
+<!--
+title: "DNS query RTT monitoring with Netdata"
+custom_edit_url: https://github.com/netdata/netdata/edit/master/collectors/python.d.plugin/dns_query_time/README.md
+sidebar_label: "DNS query RTT"
+-->
+
+# DNS query RTT monitoring with Netdata
+
+Measures DNS query round-trip time (RTT).
+
+**Requirement:**
+
+- `python-dnspython` package
+
+It produces one aggregate chart or one chart per DNS server, showing the query time.
+
+## Configuration
+
+Edit the `python.d/dns_query_time.conf` configuration file using `edit-config` from the Netdata [config
+directory](/docs/configure/nodes.md), which is typically at `/etc/netdata`.
+
+```bash
+cd /etc/netdata # Replace this path with your Netdata config directory, if different
+sudo ./edit-config python.d/dns_query_time.conf
+```
+
+---
+
+[![analytics](https://www.google-analytics.com/collect?v=1&aip=1&t=pageview&_s=1&ds=github&dr=https%3A%2F%2Fgithub.com%2Fnetdata%2Fnetdata&dl=https%3A%2F%2Fmy-netdata.io%2Fgithub%2Fcollectors%2Fpython.d.plugin%2Fdns_query_time%2FREADME&_u=MAC~&cid=5792dfd7-8dc4-476b-af31-da2fdb9f93d2&tid=UA-64295674-3)](<>)
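Since the module refuses to start without dnspython (see the `DNS_PYTHON` guard in the collector below), a quick hedged way to verify the package is importable is a check like the following, run with the same interpreter that executes `python.d.plugin` (the interpreter path varies per install):

```python
# Sanity check: import dnspython the same way the collector does.
# An ImportError here means the 'python-dnspython' requirement is unmet.
import dns.message
import dns.query
import dns.version

print('dnspython version:', dns.version.version)
```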
diff --git a/collectors/python.d.plugin/dns_query_time/dns_query_time.chart.py b/collectors/python.d.plugin/dns_query_time/dns_query_time.chart.py
new file mode 100644
index 0000000..7e1cb32
--- /dev/null
+++ b/collectors/python.d.plugin/dns_query_time/dns_query_time.chart.py
@@ -0,0 +1,149 @@
+# -*- coding: utf-8 -*-
+# Description: dns_query_time netdata python.d module
+# Author: ilyam8
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from random import choice
+from socket import getaddrinfo, gaierror
+from threading import Thread
+
+try:
+    import dns.exception
+    import dns.message
+    import dns.name
+    import dns.query
+    import dns.rcode
+    import dns.rdatatype
+
+    DNS_PYTHON = True
+except ImportError:
+    DNS_PYTHON = False
+
+try:
+    from queue import Queue
+except ImportError:
+    from Queue import Queue
+
+from bases.FrameworkServices.SimpleService import SimpleService
+
+update_every = 5
+
+
+class Service(SimpleService):
+    def __init__(self, configuration=None, name=None):
+        SimpleService.__init__(self, configuration=configuration, name=name)
+        self.order = list()
+        self.definitions = dict()
+        self.timeout = self.configuration.get('response_timeout', 4)
+        self.aggregate = self.configuration.get('aggregate', True)
+        self.domains = self.configuration.get('domains')
+        self.server_list = self.configuration.get('dns_servers')
+
+    def check(self):
+        if not DNS_PYTHON:
+            self.error("'python-dnspython' package is needed to use dns_query_time.chart.py")
+            return False
+
+        self.timeout = self.timeout if isinstance(self.timeout, int) else 4
+
+        if not all([self.domains, self.server_list,
+                    isinstance(self.server_list, str), isinstance(self.domains, str)]):
+            self.error("'dns_servers' and 'domains' can't be empty")
+            return False
+
+        self.domains, self.server_list = self.domains.split(), self.server_list.split()
+
+        # Iterate over a copy: removing items from a list while iterating
+        # over that same list silently skips elements.
+        for ns in self.server_list[:]:
+            if not check_ns(ns):
+                self.info('Bad NS: %s' % ns)
+                self.server_list.remove(ns)
+                if not self.server_list:
+                    return False
+
+        data = self._get_data(timeout=1)
+
+        down_servers = [s for s in data if data[s] == -100]
+        for down in down_servers:
+            down = down[3:].replace('_', '.')
+            self.info('Removed due to no response: %s' % down)
+            self.server_list.remove(down)
+            if not self.server_list:
+                return False
+
+        self.order, self.definitions = create_charts(aggregate=self.aggregate, server_list=self.server_list)
+        return True
+
+    def _get_data(self, timeout=None):
+        return dns_request(self.server_list, timeout or self.timeout, self.domains)
+
+
+def dns_request(server_list, timeout, domains):
+    threads = list()
+    que = Queue()
+    result = dict()
+
+    def dns_req(ns, t, q):
+        domain = dns.name.from_text(choice(domains))
+        request = dns.message.make_query(domain, dns.rdatatype.A)
+        # Sentinel meaning "no usable answer": timeout, error, or bad rcode.
+        query_time = -100
+
+        try:
+            resp = dns.query.udp(request, ns, timeout=t)
+            if resp.rcode() == dns.rcode.NOERROR and resp.answer:
+                query_time = resp.time * 1000
+        except dns.exception.DNSException:
+            pass
+        finally:
+            # Always report, so the joining loop below never blocks on the queue.
+            q.put({'_'.join(['ns', ns.replace('.', '_')]): query_time})
+
+    for server in server_list:
+        th = Thread(target=dns_req, args=(server, timeout, que))
+        th.start()
+        threads.append(th)
+
+    for th in threads:
+        th.join()
+        result.update(que.get())
+
+    return result
+
+
+def check_ns(ns):
+    # Resolve the name server; return its address (truthy) or False.
+    try:
+        return getaddrinfo(ns, 'domain')[0][4][0]
+    except gaierror:
+        return False
+
+
+def create_charts(aggregate, server_list):
+    if aggregate:
+        order = ['dns_group']
+        definitions = {
+            'dns_group': {
+                'options': [None, 'DNS Response Time', 'ms', 'name servers', 'dns_query_time.response_time', 'line'],
+                'lines': []
+            }
+        }
+        for ns in server_list:
+            dim = [
+                '_'.join(['ns', ns.replace('.', '_')]),
+                ns,
+                'absolute',
+            ]
+            definitions['dns_group']['lines'].append(dim)
+
+        return order, definitions
+
+    order = [''.join(['dns_', ns.replace('.', '_')]) for ns in server_list]
+    definitions = dict()
+
+    for ns in server_list:
+        definitions[''.join(['dns_', ns.replace('.', '_')])] = {
+            'options': [None, 'DNS Response Time', 'ms', ns, 'dns_query_time.response_time', 'area'],
+            'lines': [
+                [
+                    '_'.join(['ns', ns.replace('.', '_')]),
+                    ns,
+                    'absolute',
+                ]
+            ]
+        }
+    return order, definitions
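To see the threaded fan-out in `dns_request()` in isolation, here is a minimal standalone sketch. The server and domain values are hypothetical examples, and it assumes the module-level functions above are importable outside the python.d framework:

```python
# Hypothetical standalone driver for dns_request() above.
servers = ['8.8.8.8', '1.1.1.1']          # example resolvers, not defaults
domains = ['example.com', 'example.org']  # one is picked at random per query

# Keys are 'ns_' + server with dots replaced by underscores; values are
# round-trip times in milliseconds, or -100 on timeout/error/bad answer.
print(dns_request(servers, 4, domains))
# e.g. {'ns_8_8_8_8': 23.417, 'ns_1_1_1_1': 18.902}
```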
diff --git a/collectors/python.d.plugin/dns_query_time/dns_query_time.conf b/collectors/python.d.plugin/dns_query_time/dns_query_time.conf
new file mode 100644
index 0000000..9c0838e
--- /dev/null
+++ b/collectors/python.d.plugin/dns_query_time/dns_query_time.conf
@@ -0,0 +1,69 @@
+# netdata python.d.plugin configuration for dns_query_time
+#
+# This file is in YAML format. Generally the format is:
+#
+# name: value
+#
+# There are 2 sections:
+# - global variables
+# - one or more JOBS
+#
+# JOBS allow you to collect values from multiple sources.
+# Each source will have its own set of charts.
+#
+# JOB parameters have to be indented (using spaces only, example below).
+
+# ----------------------------------------------------------------------
+# Global Variables
+# These variables set the defaults for all JOBs, however each JOB
+# may define its own, overriding the defaults.
+
+# update_every sets the default data collection frequency.
+# If unset, the python.d.plugin default is used.
+# update_every: 1
+
+# priority controls the order of charts at the netdata dashboard.
+# Lower numbers move the charts towards the top of the page.
+# If unset, the default for python.d.plugin is used.
+# priority: 60000
+
+# penalty indicates whether to apply penalty to update_every in case of failures.
+# Penalty will increase every 5 failed updates in a row. Maximum penalty is 10 minutes.
+# penalty: yes
+
+# autodetection_retry sets the job re-check interval in seconds.
+# The job is not deleted if check fails.
+# Attempts to start the job are made once every autodetection_retry.
+# This feature is disabled by default.
+# autodetection_retry: 0
+
+# ----------------------------------------------------------------------
+# JOBS (data collection sources)
+#
+# The default JOBS share the same *name*. JOBS with the same name
+# are mutually exclusive. Only one of them will be allowed running at
+# any time. This allows autodetection to try several alternatives and
+# pick the one that works.
+#
+# Any number of jobs is supported.
+#
+# All python.d.plugin JOBS (for all its modules) support a set of
+# predefined parameters. These are:
+#
+# job_name:
+#     name: myname            # the JOB's name as it will appear at the
+#                             # dashboard (by default is the job_name)
+#                             # JOBs sharing a name are mutually exclusive
+#     update_every: 1         # the JOB's data collection frequency
+#     priority: 60000         # the JOB's order on the dashboard
+#     penalty: yes            # the JOB's penalty
+#     autodetection_retry: 0  # the JOB's re-check interval in seconds
+#
+# In addition to the above, dns_query_time also supports the following:
+#
+#     dns_servers: 'dns servers'  # space-separated list of DNS servers to query
+#     domains: 'domains'          # space-separated list of domains to query
+#     aggregate: yes/no           # aggregate all servers into one chart, or one chart per server
+#     response_timeout: 4         # DNS query response timeout in seconds
+#                                 # (a server is reported as -100 when it exceeds this)
+#
+# ----------------------------------------------------------------------
\ No newline at end of file
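A hedged example job using the parameters documented above (server and domain values are placeholders; both strings are split on whitespace, as the `check()` method in the collector shows):

```yaml
# Hypothetical job definition for python.d/dns_query_time.conf.
my_dns_check:
  dns_servers: '8.8.8.8 1.1.1.1'
  domains: 'example.com example.org'
  aggregate: yes
  response_timeout: 4
```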