path: root/python.d/phpfpm.chart.py
author     Federico Ceratto <federico.ceratto@gmail.com>  2017-04-30 16:09:37 +0000
committer  Federico Ceratto <federico.ceratto@gmail.com>  2017-04-30 16:09:37 +0000
commit     51f689a8e17ff3929acd2dbf39e936d2cd3ac723 (patch)
tree       92e54f543171b69dcbc639be09d11221cf96ba28 /python.d/phpfpm.chart.py
parent     New upstream version 1.5.0+dfsg (diff)
New upstream version 1.6.0+dfsg (upstream/1.6.0+dfsg)
Diffstat (limited to 'python.d/phpfpm.chart.py')
-rw-r--r-- [-rwxr-xr-x]  python.d/phpfpm.chart.py | 183
1 file changed, 106 insertions, 77 deletions
diff --git a/python.d/phpfpm.chart.py b/python.d/phpfpm.chart.py
index b79a35d7..7a983521 100755..100644
--- a/python.d/phpfpm.chart.py
+++ b/python.d/phpfpm.chart.py
@@ -4,6 +4,7 @@
from base import UrlService
import json
+import re
# default module values (can be overridden per job in `config`)
# update_every = 2
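
The comment above refers to per-job overrides; a minimal, hypothetical sketch of how such a job dict feeds the URL lookup in the new constructor further down in this diff (the sample values are invented, only the 'url' key and its default come from the patch):

    # Hypothetical per-job settings dict, as the python.d framework would hand it
    # to Service(configuration=..., name=...); the sample values are invented.
    job_config = {
        'name': 'local',
        'url': 'http://localhost/status?full&json',
        'update_every': 2,
    }

    # Mirrors the new __init__ below: fall back to the default status URL
    # when the job does not set one.
    url = job_config.get('url', 'http://localhost/status?full&json')
    print(url)  # -> http://localhost/status?full&json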
@@ -19,44 +20,75 @@ retries = 60
# }}
# charts order (can be overridden if you want less charts, or different order)
+
+POOL_INFO = [
+ ('active processes', 'active'),
+ ('max active processes', 'maxActive'),
+ ('idle processes', 'idle'),
+ ('accepted conn', 'requests'),
+ ('max children reached', 'reached'),
+ ('slow requests', 'slow')
+]
+
+PER_PROCESS_INFO = [
+ ('request duration', 'ReqDur'),
+ ('last request cpu', 'ReqCpu'),
+ ('last request memory', 'ReqMem')
+]
+
+
+def average(collection):
+ return sum(collection, 0.0) / max(len(collection), 1)
+
+CALC = [
+ ('min', min),
+ ('max', max),
+ ('avg', average)
+]
+
ORDER = ['connections', 'requests', 'performance', 'request_duration', 'request_cpu', 'request_mem']
CHARTS = {
'connections': {
- 'options': [None, 'PHP-FPM Active Connections', 'connections', 'active connections', 'phpfpm.connections', 'line'],
+ 'options': [None, 'PHP-FPM Active Connections', 'connections', 'active connections', 'phpfpm.connections',
+ 'line'],
'lines': [
- ["active"],
- ["maxActive", 'max active'],
- ["idle"]
+ ['active'],
+ ['maxActive', 'max active'],
+ ['idle']
]},
'requests': {
'options': [None, 'PHP-FPM Requests', 'requests/s', 'requests', 'phpfpm.requests', 'line'],
'lines': [
- ["requests", None, "incremental"]
+ ['requests', None, 'incremental']
]},
'performance': {
'options': [None, 'PHP-FPM Performance', 'status', 'performance', 'phpfpm.performance', 'line'],
'lines': [
- ["reached", 'max children reached'],
- ["slow", 'slow requests']
+ ['reached', 'max children reached'],
+ ['slow', 'slow requests']
]},
'request_duration': {
- 'options': [None, 'PHP-FPM Request Duration', 'milliseconds', 'request duration', 'phpfpm.request_duration', 'line'],
+ 'options': [None, 'PHP-FPM Request Duration', 'milliseconds', 'request duration', 'phpfpm.request_duration',
+ 'line'],
'lines': [
- ["maxReqDur", 'max request duration'],
- ["avgReqDur", 'average request duration']
+ ['minReqDur', 'min', 'absolute', 1, 1000],
+ ['maxReqDur', 'max', 'absolute', 1, 1000],
+ ['avgReqDur', 'avg', 'absolute', 1, 1000]
]},
'request_cpu': {
'options': [None, 'PHP-FPM Request CPU', 'percent', 'request CPU', 'phpfpm.request_cpu', 'line'],
'lines': [
- ["maxReqCPU", 'max request cpu'],
- ["avgReqCPU", 'average request cpu']
+ ['minReqCpu', 'min'],
+ ['maxReqCpu', 'max'],
+ ['avgReqCpu', 'avg']
]},
'request_mem': {
'options': [None, 'PHP-FPM Request Memory', 'kilobytes', 'request memory', 'phpfpm.request_mem', 'line'],
'lines': [
- ["maxReqMem", 'max request memory'],
- ["avgReqMem", 'average request memory']
+ ['minReqMem', 'min', 'absolute', 1, 1024],
+ ['maxReqMem', 'max', 'absolute', 1, 1024],
+ ['avgReqMem', 'avg', 'absolute', 1, 1024]
]}
}
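
For readers unfamiliar with the python.d chart format: each entry under 'lines' is [dimension id, display name, algorithm, multiplier, divisor], and the displayed value is roughly collected_value * multiplier / divisor. A small sketch, with invented raw numbers, of what the 1000 and 1024 divisors above do to the per-process duration (reported in microseconds) and memory (reported in bytes) values:

    # Invented raw values, as PHP-FPM's full status reports them:
    # 'request duration' in microseconds, 'last request memory' in bytes.
    raw = {'maxReqDur': 1523.0, 'maxReqMem': 2097152.0}

    def displayed(value, multiplier=1, divisor=1):
        # Rough model of how netdata scales a collected dimension for display.
        return value * multiplier / divisor

    print(displayed(raw['maxReqDur'], 1, 1000))  # -> 1.523 (milliseconds)
    print(displayed(raw['maxReqMem'], 1, 1024))  # -> 2048.0 (kilobytes)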
@@ -64,76 +96,73 @@ CHARTS = {
class Service(UrlService):
def __init__(self, configuration=None, name=None):
UrlService.__init__(self, configuration=configuration, name=name)
- if len(self.url) == 0:
- self.url = "http://localhost/status?full&json"
+ self.url = self.configuration.get('url', 'http://localhost/status?full&json')
self.order = ORDER
self.definitions = CHARTS
- self.assignment = {"active processes": 'active',
- "max active processes": 'maxActive',
- "idle processes": 'idle',
- "accepted conn": 'requests',
- "max children reached": 'reached',
- "slow requests": 'slow'}
- self.proc_assignment = {"request duration": 'ReqDur',
- "last request cpu": 'ReqCPU',
- "last request memory": 'ReqMem'}
+ self.regex = re.compile(r'([a-z][a-z ]+): ([\d.]+)')
+ self.json = '&json' in self.url or '?json' in self.url
+ self.json_full = self.url.endswith(('?full&json', '?json&full'))
+ self.if_all_processes_running = dict([(c_name + p_name, 0) for c_name, func in CALC
+ for metric, p_name in PER_PROCESS_INFO])
def _get_data(self):
"""
Format data received from http request
:return: dict
"""
- try:
- raw = self._get_raw_data()
- except AttributeError:
+ raw = self._get_raw_data()
+ if not raw:
return None
- if '?json' in self.url or '&json' in self.url:
- try:
- raw_json = json.loads(raw)
- except ValueError:
- return None
- data = {}
- for k,v in raw_json.items():
- if k in self.assignment:
- data[self.assignment[k]] = v
-
- if '&full' in self.url or '?full' in self.url:
- c = 0
- sum_val = {}
- for proc in raw_json['processes']:
- if proc['state'] != 'Idle':
- continue
- c += 1
- for k, v in self.proc_assignment.items():
- d = proc[k]
- if v == 'ReqDur':
- d = d/1000
- if v == 'ReqMem':
- d = d/1024
- if 'max' + v not in data or data['max' + v] < d:
- data['max' + v] = d
- if 'avg' + v not in sum_val:
- sum_val['avg' + v] = 0
- data['avg' + v] = 0
- sum_val['avg' + v] += d
- if len(sum_val):
- for k, v in sum_val.items():
- data[k] = v/c
-
- if len(data) == 0:
- return None
- return data
-
- raw = raw.split('\n')
- data = {}
- for row in raw:
- tmp = row.split(":")
- if str(tmp[0]) in self.assignment:
- try:
- data[self.assignment[tmp[0]]] = int(tmp[1])
- except (IndexError, ValueError):
- pass
- if len(data) == 0:
- return None
- return data
+ raw_json = parse_raw_data_(is_json=self.json, regex=self.regex, raw_data=raw)
+
+ # Per Pool info: active connections, requests and performance charts
+ to_netdata = fetch_data_(raw_data=raw_json, metrics_list=POOL_INFO)
+
+ # Per Process Info: duration, cpu and memory charts (min, max, avg)
+ if self.json_full:
+ p_info = dict()
+ to_netdata.update(self.if_all_processes_running) # If all processes are in running state
+ # Metrics are always 0 if the process is not in Idle state because calculation is done
+ # when the request processing has terminated
+ for process in [p for p in raw_json['processes'] if p['state'] == 'Idle']:
+ p_info.update(fetch_data_(raw_data=process, metrics_list=PER_PROCESS_INFO, pid=str(process['pid'])))
+
+ if p_info:
+ for new_name in PER_PROCESS_INFO:
+ for name, function in CALC:
+ to_netdata[name + new_name[1]] = function([p_info[k] for k in p_info if new_name[1] in k])
+
+ return to_netdata or None
+
+
+def fetch_data_(raw_data, metrics_list, pid=''):
+ """
+ :param raw_data: dict
+ :param metrics_list: list
+ :param pid: str
+ :return: dict
+ """
+ result = dict()
+ for metric, new_name in metrics_list:
+ if metric in raw_data:
+ result[new_name + pid] = float(raw_data[metric])
+ return result
+
+
+def parse_raw_data_(is_json, regex, raw_data):
+ """
+ :param is_json: bool
+ :param regex: compiled regular expr
+ :param raw_data: dict
+ :return: dict
+ """
+ if is_json:
+ try:
+ return json.loads(raw_data)
+ except ValueError:
+ return dict()
+ else:
+ raw_data = ' '.join(raw_data.split())
+ return dict(regex.findall(raw_data))
+
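
To make the non-JSON code path concrete, a short self-contained sketch (the sample status text is invented) of what parse_raw_data_ and fetch_data_ produce for a plain-text status page:

    import re

    # Same regex and metric mapping as in the module above.
    regex = re.compile(r'([a-z][a-z ]+): ([\d.]+)')
    POOL_INFO = [
        ('active processes', 'active'),
        ('max active processes', 'maxActive'),
        ('idle processes', 'idle'),
        ('accepted conn', 'requests'),
        ('max children reached', 'reached'),
        ('slow requests', 'slow'),
    ]

    # Invented sample of the plain-text /status output (no ?json in the URL).
    raw = """accepted conn:        5790
    listen queue:         0
    idle processes:       3
    active processes:     1
    max active processes: 2
    max children reached: 0
    slow requests:        0"""

    # Equivalent to the non-JSON branch of parse_raw_data_:
    # collapse whitespace, then scan for "key: number" pairs.
    parsed = dict(regex.findall(' '.join(raw.split())))

    # Equivalent to fetch_data_ over POOL_INFO: keep only the known pool
    # metrics and rename them to chart dimension ids.
    to_netdata = {new_name: float(parsed[metric])
                  for metric, new_name in POOL_INFO if metric in parsed}
    print(to_netdata)
    # {'active': 1.0, 'maxActive': 2.0, 'idle': 3.0, 'requests': 5790.0,
    #  'reached': 0.0, 'slow': 0.0}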