| field | value | date |
|---|---|---|
| author | Lennart Weller <lhw@ring0.de> | 2017-07-27 09:55:47 +0000 |
| committer | Lennart Weller <lhw@ring0.de> | 2017-07-27 09:55:47 +0000 |
| commit | a133c9c3b637b1dbe7b5b053f7e2572c1950cead (patch) | |
| tree | 2207939a88e96bca329457f40a9d9d18ab659dc1 /python.d/web_log.chart.py | |
| parent | New upstream version 1.6.0+dfsg (diff) | |
| download | netdata-a133c9c3b637b1dbe7b5b053f7e2572c1950cead.tar.xz, netdata-a133c9c3b637b1dbe7b5b053f7e2572c1950cead.zip | |
New upstream version 1.7.0+dfsg (tag: upstream/1.7.0+dfsg)
Diffstat (limited to 'python.d/web_log.chart.py')
-rw-r--r-- | python.d/web_log.chart.py | 1179 |
1 file changed, 784 insertions(+), 395 deletions(-)
diff --git a/python.d/web_log.chart.py b/python.d/web_log.chart.py
index cbc8cd235..564c9f1dd 100644
--- a/python.d/web_log.chart.py
+++ b/python.d/web_log.chart.py
@@ -1,37 +1,51 @@
 # -*- coding: utf-8 -*-
 # Description: web log netdata python.d module
 # Author: l2isbad
-
-from base import LogService
 import re
 import bisect
 from os import access, R_OK
 from os.path import getsize
-from collections import namedtuple
+from collections import namedtuple, defaultdict
 from copy import deepcopy
+try:
+    from itertools import filterfalse
+except ImportError:
+    from itertools import ifilterfalse as filterfalse
+from base import LogService
+import msg
+
 
 priority = 60000
 retries = 60
 
-ORDER = ['response_statuses', 'response_codes', 'bandwidth', 'response_time', 'requests_per_url', 'http_method',
-         'http_version', 'requests_per_ipproto', 'clients', 'clients_all']
-CHARTS = {
+ORDER_APACHE_CACHE = ['apache_cache']
+
+ORDER_WEB = ['response_statuses', 'response_codes', 'bandwidth', 'response_time', 'response_time_upstream',
+             'requests_per_url', 'requests_per_user_defined', 'http_method', 'http_version',
+             'requests_per_ipproto', 'clients', 'clients_all']
+
+ORDER_SQUID = ['squid_response_statuses', 'squid_response_codes', 'squid_detailed_response_codes',
+               'squid_method', 'squid_mime_type', 'squid_hier_code', 'squid_transport_methods',
+               'squid_transport_errors', 'squid_code', 'squid_handling_opts', 'squid_object_types',
+               'squid_cache_events', 'squid_bytes', 'squid_duration', 'squid_clients', 'squid_clients_all']
+
+CHARTS_WEB = {
     'response_codes': {
         'options': [None, 'Response Codes', 'requests/s', 'responses', 'web_log.response_codes', 'stacked'],
         'lines': [
-            ['2xx', '2xx', 'incremental'],
-            ['5xx', '5xx', 'incremental'],
-            ['3xx', '3xx', 'incremental'],
-            ['4xx', '4xx', 'incremental'],
-            ['1xx', '1xx', 'incremental'],
+            ['2xx', None, 'incremental'],
+            ['5xx', None, 'incremental'],
+            ['3xx', None, 'incremental'],
+            ['4xx', None, 'incremental'],
+            ['1xx', None, 'incremental'],
             ['0xx', 'other', 'incremental'],
-            ['unmatched', 'unmatched', 'incremental']
+            ['unmatched', None, 'incremental']
        ]},
    'bandwidth': {
-        'options': [None, 'Bandwidth', 'KB/s', 'bandwidth', 'web_log.bandwidth', 'area'],
+        'options': [None, 'Bandwidth', 'kilobits/s', 'bandwidth', 'web_log.bandwidth', 'area'],
        'lines': [
-            ['resp_length', 'received', 'incremental', 1, 1024],
-            ['bytes_sent', 'sent', 'incremental', -1, 1024]
+            ['resp_length', 'received', 'incremental', 8, 1000],
+            ['bytes_sent', 'sent', 'incremental', -8, 1000]
        ]},
    'response_time': {
        'options': [None, 'Processing Time', 'milliseconds', 'timings', 'web_log.response_time', 'area'],
@@ -40,6 +54,14 @@ CHARTS = {
             ['resp_time_max', 'max', 'incremental', 1, 1000],
             ['resp_time_avg', 'avg', 'incremental', 1, 1000]
         ]},
+    'response_time_upstream': {
+        'options': [None, 'Processing Time Upstream', 'milliseconds', 'timings',
+                    'web_log.response_time_upstream', 'area'],
+        'lines': [
+            ['resp_time_upstream_min', 'min', 'incremental', 1, 1000],
+            ['resp_time_upstream_max', 'max', 'incremental', 1, 1000],
+            ['resp_time_upstream_avg', 'avg', 'incremental', 1, 1000]
+        ]},
     'clients': {
         'options': [None, 'Current Poll Unique Client IPs', 'unique ips', 'clients', 'web_log.clients', 'stacked'],
         'lines': [
@@ -77,36 +99,160 @@ CHARTS = {
             ['redirects', 'redirect', 'incremental', 1, 1],
             ['bad_requests', 'bad', 'incremental', 1, 1],
             ['other_requests', 'other', 'incremental', 1, 1]
+        ]},
+    'requests_per_url': {
+        'options': [None, 'Requests Per Url', 'requests/s', 'urls', 'web_log.requests_per_url',
+                    'stacked'],
+        'lines': [
+            ['url_pattern_other', 'other', 'incremental', 1, 1]
+        ]},
+    'requests_per_user_defined': {
+        'options': [None, 'Requests Per User Defined Pattern', 'requests/s', 'user defined',
+                    'web_log.requests_per_user_defined', 'stacked'],
+        'lines': [
+            ['user_pattern_other', 'other', 'incremental', 1, 1]
         ]}
 }
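Note the unit change in the `bandwidth` chart above: KB/s (divisor 1024) becomes kilobits/s, with each dimension now carrying multiplier 8 and divisor 1000, so byte counters are scaled by 8/1000 inside netdata. A minimal sketch of that arithmetic (the helper name is illustrative, not part of the module):

```python
def bytes_to_kilobits(value, multiplier=8, divisor=1000):
    """Apply the dimension's multiplier/divisor the way netdata does: bytes -> kilobits."""
    return value * multiplier / divisor

# 125000 bytes/s becomes 1000 kilobits/s
assert bytes_to_kilobits(125000) == 1000
```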
 
-NAMED_URL_PATTERN = namedtuple('URL_PATTERN', ['description', 'pattern'])
+CHARTS_APACHE_CACHE = {
+    'apache_cache': {
+        'options': [None, 'Apache Cached Responses', 'percent cached', 'cached', 'web_log.apache_cache_cache',
+                    'stacked'],
+        'lines': [
+            ["hit", 'cache', "percentage-of-absolute-row"],
+            ["miss", None, "percentage-of-absolute-row"],
+            ["other", None, "percentage-of-absolute-row"]
+        ]}
+}
+
+CHARTS_SQUID = {
+    'squid_duration': {
+        'options': [None, 'Elapsed Time The Transaction Busied The Cache',
+                    'milliseconds', 'squid_timings', 'web_log.squid_duration', 'area'],
+        'lines': [
+            ['duration_min', 'min', 'incremental', 1, 1000],
+            ['duration_max', 'max', 'incremental', 1, 1000],
+            ['duration_avg', 'avg', 'incremental', 1, 1000]
+        ]},
+    'squid_bytes': {
+        'options': [None, 'Amount Of Data Delivered To The Clients',
+                    'kilobits/s', 'squid_bandwidth', 'web_log.squid_bytes', 'area'],
+        'lines': [
+            ['bytes', 'sent', 'incremental', 8, 1000]
+        ]},
+    'squid_response_statuses': {
+        'options': [None, 'Response Statuses', 'responses/s', 'squid_responses', 'web_log.squid_response_statuses',
+                    'stacked'],
+        'lines': [
+            ['successful_requests', 'success', 'incremental', 1, 1],
+            ['server_errors', 'error', 'incremental', 1, 1],
+            ['redirects', 'redirect', 'incremental', 1, 1],
+            ['bad_requests', 'bad', 'incremental', 1, 1],
+            ['other_requests', 'other', 'incremental', 1, 1]
+        ]},
+    'squid_response_codes': {
+        'options': [None, 'Response Codes', 'responses/s', 'squid_responses',
+                    'web_log.squid_response_codes', 'stacked'],
+        'lines': [
+            ['2xx', None, 'incremental'],
+            ['5xx', None, 'incremental'],
+            ['3xx', None, 'incremental'],
+            ['4xx', None, 'incremental'],
+            ['1xx', None, 'incremental'],
+            ['0xx', None, 'incremental'],
+            ['other', None, 'incremental'],
+            ['unmatched', None, 'incremental']
+        ]},
+    'squid_code': {
+        'options': [None, 'Responses Per Cache Result Of The Request',
+                    'requests/s', 'squid_squid_cache', 'web_log.squid_code', 'stacked'],
+        'lines': [
+        ]},
+    'squid_detailed_response_codes': {
+        'options': [None, 'Detailed Response Codes',
+                    'responses/s', 'squid_responses', 'web_log.squid_detailed_response_codes', 'stacked'],
+        'lines': [
+        ]},
+    'squid_hier_code': {
+        'options': [None, 'Responses Per Hierarchy Code',
+                    'requests/s', 'squid_hierarchy', 'web_log.squid_hier_code', 'stacked'],
+        'lines': [
+        ]},
+    'squid_method': {
+        'options': [None, 'Requests Per Method',
+                    'requests/s', 'squid_requests', 'web_log.squid_method', 'stacked'],
+        'lines': [
+        ]},
+    'squid_mime_type': {
+        'options': [None, 'Requests Per MIME Type',
+                    'requests/s', 'squid_requests', 'web_log.squid_mime_type', 'stacked'],
+        'lines': [
+        ]},
+    'squid_clients': {
+        'options': [None, 'Current Poll Unique Client IPs', 'unique ips', 'squid_clients',
+                    'web_log.squid_clients', 'stacked'],
+        'lines': [
+            ['unique_ipv4', 'ipv4', 'incremental'],
+            ['unique_ipv6', 'ipv6', 'incremental']
+        ]},
+    'squid_clients_all': {
+        'options': [None, 'All Time Unique Client IPs', 'unique ips', 'squid_clients',
+                    'web_log.squid_clients_all', 'stacked'],
+        'lines': [
+            ['unique_tot_ipv4', 'ipv4', 'absolute'],
+            ['unique_tot_ipv6', 'ipv6', 'absolute']
+        ]},
+    'squid_transport_methods': {
+        'options': [None, 'Transport Methods', 'requests/s', 'squid_squid_transport',
+                    'web_log.squid_transport_methods', 'stacked'],
+        'lines': [
+        ]},
+    'squid_transport_errors': {
+        'options': [None, 'Transport Errors', 'requests/s', 'squid_squid_transport',
+                    'web_log.squid_transport_errors', 'stacked'],
+        'lines': [
+        ]},
+    'squid_handling_opts': {
+        'options': [None, 'Handling Opts', 'requests/s', 'squid_squid_cache',
+                    'web_log.squid_handling_opts', 'stacked'],
+        'lines': [
+        ]},
+    'squid_object_types': {
+        'options': [None, 'Object Types', 'objects/s', 'squid_squid_cache',
+                    'web_log.squid_object_types', 'stacked'],
+        'lines': [
+        ]},
+    'squid_cache_events': {
+        'options': [None, 'Cache Events', 'events/s', 'squid_squid_cache',
+                    'web_log.squid_cache_events', 'stacked'],
+        'lines': [
+        ]}
+}
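The new `apache_cache` chart uses the `percentage-of-absolute-row` algorithm, so netdata presumably renders each dimension as its share of the row total rather than as a raw rate. A rough sketch of that normalization (illustrative only; the real math lives in the netdata core):

```python
def percentage_of_absolute_row(row):
    """Express each value as a percentage of the absolute row total."""
    total = sum(abs(v) for v in row.values())
    return {k: (100.0 * abs(v) / total if total else 0.0) for k, v in row.items()}

print(percentage_of_absolute_row({'hit': 3, 'miss': 1, 'other': 0}))
# {'hit': 75.0, 'miss': 25.0, 'other': 0.0}
```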
+
+NAMED_PATTERN = namedtuple('PATTERN', ['description', 'func'])
 
 DET_RESP_AGGR = ['', '_1xx', '_2xx', '_3xx', '_4xx', '_5xx', '_Other']
 
+SQUID_CODES = dict(TCP='squid_transport_methods', UDP='squid_transport_methods', NONE='squid_transport_methods',
+                   CLIENT='squid_handling_opts', IMS='squid_handling_opts', ASYNC='squid_handling_opts',
+                   SWAPFAIL='squid_handling_opts', REFRESH='squid_handling_opts', SHARED='squid_handling_opts',
+                   REPLY='squid_handling_opts', NEGATIVE='squid_object_types', STALE='squid_object_types',
+                   OFFLINE='squid_object_types', INVALID='squid_object_types', FAIL='squid_object_types',
+                   MODIFIED='squid_object_types', UNMODIFIED='squid_object_types', REDIRECT='squid_object_types',
+                   HIT='squid_cache_events', MEM='squid_cache_events', MISS='squid_cache_events',
+                   DENIED='squid_cache_events', NOFETCH='squid_cache_events', TUNNEL='squid_cache_events',
+                   ABORTED='squid_transport_errors', TIMEOUT='squid_transport_errors')
+
 
 class Service(LogService):
     def __init__(self, configuration=None, name=None):
         """
         :param configuration:
         :param name:
-        # self._get_data = None # will be assigned in 'check' method.
-        # self.order = None # will be assigned in 'create_*_method' method.
-        # self.definitions = None # will be assigned in 'create_*_method' method.
         """
         LogService.__init__(self, configuration=configuration, name=name)
-        # Variables from module configuration file
-        self.log_type = self.configuration.get('type', 'web_access')
+        self.log_type = self.configuration.get('type', 'web')
         self.log_path = self.configuration.get('path')
-        self.url_pattern = self.configuration.get('categories')  # dict
-        self.custom_log_format = self.configuration.get('custom_log_format')  # dict
-        # Instance variables
-        self.regex = None  # will be assigned in 'find_regex' or 'find_regex_custom' method
-        self.data = {'bytes_sent': 0, 'resp_length': 0, 'resp_time_min': 0, 'resp_time_max': 0,
-                     'resp_time_avg': 0, 'unique_cur_ipv4': 0, 'unique_cur_ipv6': 0, '2xx': 0,
-                     '5xx': 0, '3xx': 0, '4xx': 0, '1xx': 0, '0xx': 0, 'unmatched': 0, 'req_ipv4': 0,
-                     'req_ipv6': 0, 'unique_tot_ipv4': 0, 'unique_tot_ipv6': 0, 'successful_requests': 0,
-                     'redirects': 0, 'bad_requests': 0, 'server_errors': 0, 'other_requests': 0, 'GET': 0}
 
     def check(self):
         """
@@ -117,11 +263,18 @@ class Service(LogService):
         3. "log_path' must not be empty. We need at least 1 line to find appropriate pattern to parse
         4. other checks depends on log "type"
         """
+
+        log_types = dict(web=Web, apache_cache=ApacheCache, squid=Squid)
+
+        if self.log_type not in log_types:
+            self.error('bad log type (%s). Supported types: %s' % (self.log_type, log_types.keys()))
+            return False
+
         if not self.log_path:
             self.error('log path is not specified')
             return False
 
-        if not access(self.log_path, R_OK):
+        if not (self._find_recent_log_file() and access(self.log_path, R_OK)):
             self.error('%s not readable or not exist' % self.log_path)
             return False
 
@@ -129,357 +282,237 @@ class Service(LogService):
             self.error('%s is empty' % self.log_path)
             return False
 
+        self.configuration['update_every'] = self.update_every
+        self.configuration['name'] = self.name
+        self.configuration['override_name'] = self.override_name
+        self.configuration['_dimensions'] = self._dimensions
+        self.configuration['path'] = self.log_path
+
+        cls = log_types[self.log_type]
+        self.Job = cls(configuration=self.configuration)
+        if self.Job.check():
+            self.order = self.Job.order
+            self.definitions = self.Job.definitions
+            self.info('Current log file: %s' % self.log_path)
+            return True
+        return False
+
+    def _get_data(self):
+        return self.Job.get_data(self._get_raw_data())
+
+
+class Mixin:
+    def filter_data(self, raw_data):
+        """
+        :param raw_data: list
+        :return:
+        """
+        if not self.pre_filter:
+            return raw_data
+        filtered = raw_data
+        for elem in self.pre_filter:
+            if elem.description == 'filter_include':
+                filtered = filter(elem.func, filtered)
+            elif elem.description == 'filter_exclude':
+                filtered = filterfalse(elem.func, filtered)
+        return filtered
+
+    def add_new_dimension(self, dimension_id, chart_key, dimension=None,
+                          algorithm='incremental', multiplier=1, divisor=1):
+        """
+        :param dimension:
+        :param chart_key:
+        :param dimension_id:
+        :param algorithm:
+        :param multiplier:
+        :param divisor:
+        :return:
+        """
+
+        self.data[dimension_id] = 0
+        # SET method check if dim in _dimensions
+        self.conf['_dimensions'].append(dimension_id)
+        # UPDATE method do SET only if dim in definitions
+        dimension_list = list(map(str, [dimension_id,
+                                        dimension if dimension else dimension_id,
+                                        algorithm,
+                                        multiplier,
+                                        divisor]))
+        self.definitions[chart_key]['lines'].append(dimension_list)
+        job_name = find_job_name(self.conf['override_name'], self.conf['name'])
+        opts = self.definitions[chart_key]['options']
+        chart = 'CHART %s.%s "" "%s" %s "%s" %s %s 60000 %s\n' % (job_name, chart_key,
+                                                                  opts[1], opts[2], opts[3],
+                                                                  opts[4], opts[5], self.conf['update_every'])
+        print(chart + "DIMENSION %s\n" % ' '.join(dimension_list))
+
+    def get_last_line(self):
+        """
+        Reads last line from the log file
+        :return: str:
+        """
         # Read last line (or first if there is only one line)
-        with open(self.log_path, 'rb') as logs:
+        with open(self.conf['path'], 'rb') as logs:
             logs.seek(-2, 2)
             while logs.read(1) != b'\n':
                 logs.seek(-2, 1)
                 if logs.tell() == 0:
                     break
             last_line = logs.readline()
-
         try:
-            last_line = last_line.decode()
+            return last_line.decode()
         except UnicodeDecodeError:
             try:
-                last_line = last_line.decode(encoding='utf-8')
+                return last_line.decode(encoding='utf-8')
             except (TypeError, UnicodeDecodeError) as error:
-                self.error(str(error))
+                msg.error('web_log', str(error))
                 return False
 
-        if self.log_type == 'web_access':
-            self.unique_all_time = list()  # sorted list of unique IPs
-            self.detailed_response_codes = self.configuration.get('detailed_response_codes', True)
-            self.detailed_response_aggregate = self.configuration.get('detailed_response_aggregate', True)
-            self.all_time = self.configuration.get('all_time', True)
+    @staticmethod
+    def error(*params):
+        msg.error('web_log', ' '.join(map(str, params)))
 
-            # Custom_log_format or predefined log format.
-            if self.custom_log_format:
-                match_dict, error = self.find_regex_custom(last_line)
-            else:
-                match_dict, error = self.find_regex(last_line)
+    @staticmethod
+    def info(*params):
+        msg.info('web_log', ' '.join(map(str, params)))
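`Mixin.filter_data()` chains `filter()` for `filter_include` patterns and `filterfalse()` for `filter_exclude`, which is why the module imports `filterfalse` (falling back to Python 2's `ifilterfalse`). A standalone sketch of the same idea, with hypothetical patterns:

```python
import re
from itertools import filterfalse  # ifilterfalse on Python 2

lines = ['GET /index.html 200', 'GET /health 200', 'POST /api 503']
include = re.compile('GET').search      # keep only lines matching this
exclude = re.compile('/health').search  # then drop lines matching this

filtered = filterfalse(exclude, filter(include, lines))
print(list(filtered))  # ['GET /index.html 200']
```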
-        # "match_dict" is None if there are any problems
-        if match_dict is None:
-            self.error(str(error))
-            return False
 
-        # self.url_pattern check
-        if self.url_pattern:
-            self.url_pattern = check_req_per_url_pattern('rpu', self.url_pattern)
 
+class Web(Mixin):
+    def __init__(self, configuration):
+        self.conf = configuration
+        self.pre_filter = check_patterns('filter', self.conf.get('filter'))
+        self.storage = dict()
+        self.data = {'bytes_sent': 0, 'resp_length': 0, 'resp_time_min': 0, 'resp_time_max': 0,
+                     'resp_time_avg': 0, 'resp_time_upstream_min': 0, 'resp_time_upstream_max': 0,
+                     'resp_time_upstream_avg': 0, 'unique_cur_ipv4': 0, 'unique_cur_ipv6': 0, '2xx': 0,
+                     '5xx': 0, '3xx': 0, '4xx': 0, '1xx': 0, '0xx': 0, 'unmatched': 0, 'req_ipv4': 0,
+                     'req_ipv6': 0, 'unique_tot_ipv4': 0, 'unique_tot_ipv6': 0, 'successful_requests': 0,
+                     'redirects': 0, 'bad_requests': 0, 'server_errors': 0, 'other_requests': 0, 'GET': 0}
 
-            self.create_access_charts(match_dict)  # Create charts
-            self._get_data = self._get_access_data  # _get_data assignment
-        else:
-            self.error('Not implemented')
+    def check(self):
+        last_line = self.get_last_line()
+        if not last_line:
             return False
-
-        # Double check
-        if not self.regex:
-            self.error('That can not happen, but it happened. "regex" is None')
-
-        self.info('Collected data: %s' % list(match_dict.keys()))
-        return True
-
-    def find_regex_custom(self, last_line):
-        """
-        :param last_line: str: literally last line from log file
-        :return: tuple where:
-                [0]: dict or None: match_dict or None
-                [1]: str: error description
-
-        We are here only if "custom_log_format" is in logs. We need to make sure:
-        1. "custom_log_format" is a dict
-        2. "pattern" in "custom_log_format" and pattern is <str> instance
-        3. if "time_multiplier" is in "custom_log_format" it must be <int> instance
-
-        If all parameters is ok we need to make sure:
-        1. Pattern search is success
-        2. Pattern search contains named subgroups (?P<subgroup_name>) (= "match_dict")
-
-        If pattern search is success we need to make sure:
-        1. All mandatory keys ['address', 'code', 'bytes_sent', 'method', 'url'] are in "match_dict"
-
-        If this is True we need to make sure:
-        1. All mandatory key values from "match_dict" have the correct format
-           ("code" is integer, "method" is uppercase word, etc)
-
-        If non mandatory keys in "match_dict" we need to make sure:
-        1. All non mandatory key values from match_dict ['resp_length', 'resp_time'] have the correct format
-           ("resp_length" is integer or "-", "resp_time" is integer or float)
-
-        """
-        if not hasattr(self.custom_log_format, 'keys'):
-            return find_regex_return(msg='Custom log: "custom_log_format" is not a <dict>')
-
-        pattern = self.custom_log_format.get('pattern')
-        if not (pattern and isinstance(pattern, str)):
-            return find_regex_return(msg='Custom log: "pattern" option is not specified or type is not <str>')
-
-        resp_time_func = self.custom_log_format.get('time_multiplier') or 0
-
-        if not isinstance(resp_time_func, int):
-            return find_regex_return(msg='Custom log: "time_multiplier" is not an integer')
-
-        try:
-            regex = re.compile(pattern)
-        except re.error as error:
-            return find_regex_return(msg='Pattern compile error: %s' % str(error))
-
-        match = regex.search(last_line)
-        if match:
-            match_dict = match.groupdict() or None
+        # Custom_log_format or predefined log format.
+        if self.conf.get('custom_log_format'):
+            match_dict, error = self.find_regex_custom(last_line)
         else:
-            return find_regex_return(msg='Custom log: pattern search FAILED')
+            match_dict, error = self.find_regex(last_line)
 
+        # "match_dict" is None if there are any problems
         if match_dict is None:
-            find_regex_return(msg='Custom log: search OK but contains no named subgroups'
-                              ' (you need to use ?P<subgroup_name>)')
-        else:
-            mandatory_dict = {'address': r'[\da-f.:]+',
-                              'code': r'[1-9]\d{2}',
-                              'method': r'[A-Z]+',
-                              'bytes_sent': r'\d+|-'}
-            optional_dict = {'resp_length': r'\d+',
-                             'resp_time': r'[\d.]+',
-                             'http_version': r'\d\.\d'}
-
-            mandatory_values = set(mandatory_dict) - set(match_dict)
-            if mandatory_values:
-                return find_regex_return(msg='Custom log: search OK but some mandatory keys (%s) are missing'
-                                         % list(mandatory_values))
-            else:
-                for key in mandatory_dict:
-                    if not re.search(mandatory_dict[key], match_dict[key]):
-                        return find_regex_return(msg='Custom log: can\'t parse "%s": %s'
-                                                 % (key, match_dict[key]))
-
-                optional_values = set(optional_dict) & set(match_dict)
-                for key in optional_values:
-                    if not re.search(optional_dict[key], match_dict[key]):
-                        return find_regex_return(msg='Custom log: can\'t parse "%s": %s'
-                                                 % (key, match_dict[key]))
-
-                dot_in_time = '.' in match_dict.get('resp_time', '')
-                if dot_in_time:
-                    self.resp_time_func = lambda time: time * (resp_time_func or 1000000)
-                else:
-                    self.resp_time_func = lambda time: time * (resp_time_func or 1)
-
-                self.regex = regex
-                return find_regex_return(match_dict=match_dict)
-
-    def find_regex(self, last_line):
-        """
-        :param last_line: str: literally last line from log file
-        :return: tuple where:
-                [0]: dict or None: match_dict or None
-                [1]: str: error description
-        We need to find appropriate pattern for current log file
-        All logic is do a regex search through the string for all predefined patterns
-        until we find something or fail.
-        """
-        # REGEX: 1.IPv4 address 2.HTTP method 3. URL 4. Response code
-        # 5. Bytes sent 6. Response length 7. Response process time
-        acs_default = re.compile(r'(?P<address>[\da-f.:]+)'
-                                 r' -.*?"(?P<method>[A-Z]+)'
-                                 r' (?P<url>[^ ]+)'
-                                 r' [A-Z]+/(?P<http_version>\d\.\d)"'
-                                 r' (?P<code>[1-9]\d{2})'
-                                 r' (?P<bytes_sent>\d+|-)')
-
-        acs_apache_ext_insert = re.compile(r'(?P<address>[\da-f.:]+)'
-                                           r' -.*?"(?P<method>[A-Z]+)'
-                                           r' (?P<url>[^ ]+)'
-                                           r' [A-Z]+/(?P<http_version>\d\.\d)"'
-                                           r' (?P<code>[1-9]\d{2})'
-                                           r' (?P<bytes_sent>\d+|-)'
-                                           r' (?P<resp_length>\d+)'
-                                           r' (?P<resp_time>\d+) ')
-
-        acs_apache_ext_append = re.compile(r'(?P<address>[\da-f.:]+)'
-                                           r' -.*?"(?P<method>[A-Z]+)'
-                                           r' (?P<url>[^ ]+)'
-                                           r' [A-Z]+/(?P<http_version>\d\.\d)"'
-                                           r' (?P<code>[1-9]\d{2})'
-                                           r' (?P<bytes_sent>\d+|-)'
-                                           r' .*?'
-                                           r' (?P<resp_length>\d+)'
-                                           r' (?P<resp_time>\d+)'
-                                           r'(?: |$)')
-
-        acs_nginx_ext_insert = re.compile(r'(?P<address>[\da-f.:]+)'
-                                          r' -.*?"(?P<method>[A-Z]+)'
-                                          r' (?P<url>[^ ]+)'
-                                          r' [A-Z]+/(?P<http_version>\d\.\d)"'
-                                          r' (?P<code>[1-9]\d{2})'
-                                          r' (?P<bytes_sent>\d+)'
-                                          r' (?P<resp_length>\d+)'
-                                          r' (?P<resp_time>\d+\.\d+) ')
-
-        acs_nginx_ext_append = re.compile(r'(?P<address>[\da-f.:]+)'
-                                          r' -.*?"(?P<method>[A-Z]+)'
-                                          r' (?P<url>[^ ]+)'
-                                          r' [A-Z]+/(?P<http_version>\d\.\d)"'
-                                          r' (?P<code>[1-9]\d{2})'
-                                          r' (?P<bytes_sent>\d+)'
-                                          r' .*?'
-                                          r' (?P<resp_length>\d+)'
-                                          r' (?P<resp_time>\d+\.\d+)')
-
-        def func_usec(time):
-            return time
-
-        def func_sec(time):
-            return time * 1000000
-
-        r_regex = [acs_apache_ext_insert, acs_apache_ext_append, acs_nginx_ext_insert,
-                   acs_nginx_ext_append, acs_default]
-        r_function = [func_usec, func_usec, func_sec, func_sec, func_usec]
-        regex_function = zip(r_regex, r_function)
-
-        match_dict = dict()
-        for regex, function in regex_function:
-            match = regex.search(last_line)
-            if match:
-                self.regex = regex
-                self.resp_time_func = function
-                match_dict = match.groupdict()
-                break
+            self.error(str(error))
+            return False
+        self.storage['unique_all_time'] = list()
+        self.storage['url_pattern'] = check_patterns('url_pattern', self.conf.get('categories'))
+        self.storage['user_pattern'] = check_patterns('user_pattern', self.conf.get('user_defined'))
 
-        return find_regex_return(match_dict=match_dict or None,
-                                 msg='Unknown log format. You need to use "custom_log_format" feature.')
+        self.create_web_charts(match_dict)  # Create charts
+        self.info('Collected data: %s' % list(match_dict.keys()))
+        return True
 
-    def create_access_charts(self, match_dict):
+    def create_web_charts(self, match_dict):
         """
         :param match_dict: dict: regex.search.groupdict(). Ex. {'address': '127.0.0.1', 'code': '200', 'method': 'GET'}
         :return:
-        Create additional charts depending on the 'match_dict' keys and configuration file options
-        1. 'time_response' chart is removed if there is no 'resp_time' in match_dict.
-        2. Other stuff is just remove/add chart depending on yes/no in conf
+        Create/remove additional charts depending on the 'match_dict' keys and configuration file options
         """
+        self.order = ORDER_WEB[:]
+        self.definitions = deepcopy(CHARTS_WEB)
 
-        def find_job_name(override_name, name):
-            """
-            :param override_name: str: 'name' var from configuration file
-            :param name: str: 'job_name' from configuration file
-            :return: str: new job name
-            We need this for dynamic charts. Actually same logic as in python.d.plugin.
- """ - add_to_name = override_name or name - if add_to_name: - return '_'.join(['web_log', re.sub('\s+', '_', add_to_name)]) - else: - return 'web_log' - - self.order = ORDER[:] - self.definitions = deepcopy(CHARTS) - - job_name = find_job_name(self.override_name, self.name) - - self.http_method_chart = 'CHART %s.http_method' \ - ' "" "Requests Per HTTP Method" requests/s "http methods"' \ - ' web_log.http_method stacked 11 %s\n' \ - 'DIMENSION GET GET incremental\n' % (job_name, self.update_every) - self.http_version_chart = 'CHART %s.http_version' \ - ' "" "Requests Per HTTP Version" requests/s "http versions"' \ - ' web_log.http_version stacked 12 %s\n' % (job_name, self.update_every) - - # Remove 'request_time' chart from ORDER if resp_time not in match_dict if 'resp_time' not in match_dict: self.order.remove('response_time') - # Remove 'clients_all' chart from ORDER if specified in the configuration - if not self.all_time: + if 'resp_time_upstream' not in match_dict: + self.order.remove('response_time_upstream') + + if not self.conf.get('all_time', True): self.order.remove('clients_all') + # Add 'detailed_response_codes' chart if specified in the configuration - if self.detailed_response_codes: - self.detailed_chart = list() - for prio, add_to_dim in enumerate(DET_RESP_AGGR): - self.detailed_chart.append('CHART %s.detailed_response_codes%s ""' - ' "Detailed Response Codes %s" requests/s responses' - ' web_log.detailed_response_codes%s stacked %s %s\n' - % (job_name, add_to_dim, add_to_dim[1:], add_to_dim, - str(prio), self.update_every)) - - codes = DET_RESP_AGGR[:1] if self.detailed_response_aggregate else DET_RESP_AGGR[1:] + if self.conf.get('detailed_response_codes', True): + codes = DET_RESP_AGGR[:1] if self.conf.get('detailed_response_aggregate', True) else DET_RESP_AGGR[1:] for code in codes: self.order.append('detailed_response_codes%s' % code) - self.definitions['detailed_response_codes%s' % code] = {'options': - [None, - 'Detailed Response Codes %s' % code[1:], - 'requests/s', - 'responses', - 'web_log.detailed_response_codes%s' % code, - 'stacked'], - 'lines': []} + self.definitions['detailed_response_codes%s' % code] \ + = {'options': [None, 'Detailed Response Codes %s' % code[1:], 'requests/s', 'responses', + 'web_log.detailed_response_codes%s' % code, 'stacked'], + 'lines': []} # Add 'requests_per_url' chart if specified in the configuration - if self.url_pattern: - self.definitions['requests_per_url'] = {'options': [None, 'Requests Per Url', 'requests/s', - 'urls', 'web_log.requests_per_url', 'stacked'], - 'lines': [['rpu_other', 'other', 'incremental']]} - for elem in self.url_pattern: - self.definitions['requests_per_url']['lines'].append([elem.description, elem.description[4:], + if self.storage['url_pattern']: + for elem in self.storage['url_pattern']: + self.definitions['requests_per_url']['lines'].append([elem.description, + elem.description[12:], 'incremental']) - self.data.update({elem.description: 0}) - self.data.update({'rpu_other': 0}) + self.data[elem.description] = 0 + self.data['url_pattern_other'] = 0 else: self.order.remove('requests_per_url') - def add_new_dimension(self, dimension, line_list, chart_string, key): - """ - :param dimension: str: response status code. Ex.: '202', '499' - :param line_list: list: Ex.: ['202', '202', 'incremental'] - :param chart_string: Current string we need to pass to netdata to rebuild the chart - :param key: str: CHARTS dict key (chart name). 
-    def add_new_dimension(self, dimension, line_list, chart_string, key):
-        """
-        :param dimension: str: response status code. Ex.: '202', '499'
-        :param line_list: list: Ex.: ['202', '202', 'incremental']
-        :param chart_string: Current string we need to pass to netdata to rebuild the chart
-        :param key: str: CHARTS dict key (chart name). Ex.: 'response_time'
-        :return: str: new chart string = previous + new dimensions
-        """
-        self.data.update({dimension: 0})
-        # SET method check if dim in _dimensions
-        self._dimensions.append(dimension)
-        # UPDATE method do SET only if dim in definitions
-        self.definitions[key]['lines'].append(line_list)
-        chart = chart_string
-        chart += "%s %s\n" % ('DIMENSION', ' '.join(line_list))
-        print(chart)
-        return chart
+        # Add 'requests_per_user_defined' chart if specified in the configuration
+        if self.storage['user_pattern'] and 'user_defined' in match_dict:
+            for elem in self.storage['user_pattern']:
+                self.definitions['requests_per_user_defined']['lines'].append([elem.description,
+                                                                               elem.description[13:],
+                                                                               'incremental'])
+                self.data[elem.description] = 0
+            self.data['user_pattern_other'] = 0
+        else:
+            self.order.remove('requests_per_user_defined')
 
-    def _get_access_data(self):
+    def get_data(self, raw_data=None):
         """
-        Parse new log lines
+        Parses new log lines
         :return: dict OR None
          None if _get_raw_data method fails.
          In all other cases - dict.
         """
-        raw = self._get_raw_data()
-        if raw is None:
-            return None
+        if not raw_data:
+            return None if raw_data is None else self.data
+
+        filtered_data = self.filter_data(raw_data=raw_data)
+
+        unique_current = set()
+        timings = defaultdict(lambda: dict(minimum=None, maximum=0, summary=0, count=0))
 
-        request_time, unique_current = list(), list()
-        request_counter = {'count': 0, 'sum': 0}
         ip_address_counter = {'unique_cur_ip': 0}
         for line in filtered_data:
             match = self.storage['regex'].search(line)
             if match:
                 match_dict = match.groupdict()
                 try:
-                    code = ''.join([match_dict['code'][0], 'xx'])
+                    code = match_dict['code'][0] + 'xx'
                     self.data[code] += 1
                 except KeyError:
                     self.data['0xx'] += 1
                 # detailed response code
-                if self.detailed_response_codes:
-                    self._get_data_detailed_response_codes(match_dict['code'])
+                if self.conf.get('detailed_response_codes', True):
+                    self.get_data_per_response_codes_detailed(code=match_dict['code'])
                 # response statuses
-                self._get_data_statuses(match_dict['code'])
+                self.get_data_per_statuses(code=match_dict['code'])
                 # requests per url
-                if self.url_pattern:
-                    self._get_data_per_url(match_dict['url'])
+                if self.storage['url_pattern']:
+                    self.get_data_per_pattern(row=match_dict['url'],
+                                              other='url_pattern_other',
+                                              pattern=self.storage['url_pattern'])
+                # requests per user defined pattern
+                if self.storage['user_pattern'] and 'user_defined' in match_dict:
+                    self.get_data_per_pattern(row=match_dict['user_defined'],
+                                              other='user_pattern_other',
+                                              pattern=self.storage['user_pattern'])
                 # requests per http method
-                self._get_data_http_method(match_dict['method'])
+                if match_dict['method'] not in self.data:
+                    self.add_new_dimension(dimension_id=match_dict['method'],
+                                           chart_key='http_method')
+                self.data[match_dict['method']] += 1
                 # requests per http version
                 if 'http_version' in match_dict:
-                    self._get_data_http_version(match_dict['http_version'])
+                    dim_id = match_dict['http_version'].replace('.', '_')
+                    if dim_id not in self.data:
+                        self.add_new_dimension(dimension_id=dim_id,
+                                               chart_key='http_version',
+                                               dimension=match_dict['http_version'])
+                    self.data[dim_id] += 1
                 # bandwidth sent
                 bytes_sent = match_dict['bytes_sent'] if '-' not in match_dict['bytes_sent'] else 0
                 self.data['bytes_sent'] += int(bytes_sent)
@@ -487,92 +520,245 @@ class Service(LogService):
                 if 'resp_length' in match_dict:
                     self.data['resp_length'] += int(match_dict['resp_length'])
                 if 'resp_time' in match_dict:
-                    resp_time = self.resp_time_func(float(match_dict['resp_time']))
-                    bisect.insort_left(request_time, resp_time)
-                    request_counter['count'] += 1
-                    request_counter['sum'] += resp_time
+                    get_timings(timings=timings['resp_time'],
+                                time=self.storage['func_resp_time'](float(match_dict['resp_time'])))
+                if 'resp_time_upstream' in match_dict and match_dict['resp_time_upstream'] != '-':
+                    get_timings(timings=timings['resp_time_upstream'],
+                                time=self.storage['func_resp_time'](float(match_dict['resp_time_upstream'])))
                 # requests per ip proto
                 proto = 'ipv4' if '.' in match_dict['address'] else 'ipv6'
                 self.data['req_' + proto] += 1
                 # unique clients ips
-                if address_not_in_pool(self.unique_all_time, match_dict['address'],
-                                       self.data['unique_tot_ipv4'] + self.data['unique_tot_ipv6']):
+                if address_not_in_pool(pool=self.storage['unique_all_time'],
+                                       address=match_dict['address'],
+                                       pool_size=self.data['unique_tot_ipv4'] + self.data['unique_tot_ipv6']):
                     self.data['unique_tot_' + proto] += 1
-                if address_not_in_pool(unique_current, match_dict['address'], ip_address_counter['unique_cur_ip']):
+                if match_dict['address'] not in unique_current:
                     self.data['unique_cur_' + proto] += 1
-                    ip_address_counter['unique_cur_ip'] += 1
+                    unique_current.add(match_dict['address'])
             else:
                 self.data['unmatched'] += 1
 
         # timings
-        if request_time:
-            self.data['resp_time_min'] += int(request_time[0])
-            self.data['resp_time_avg'] += int(round(float(request_counter['sum']) / request_counter['count']))
-            self.data['resp_time_max'] += int(request_time[-1])
+        for elem in timings:
+            self.data[elem + '_min'] += timings[elem]['minimum']
+            self.data[elem + '_avg'] += timings[elem]['summary'] / timings[elem]['count']
+            self.data[elem + '_max'] += timings[elem]['maximum']
         return self.data
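Instead of keeping a sorted list of every response time (the old `bisect.insort_left` approach), `get_data()` now streams each value through a small min/max/sum/count accumulator per timing series, and only the final loop divides the sum by the count for the average. The same accumulator in isolation (mirroring `get_timings()`, defined further down):

```python
from collections import defaultdict

timings = defaultdict(lambda: dict(minimum=None, maximum=0, summary=0, count=0))

def accumulate(t, time):
    # first sample initializes the minimum; later samples update min/max
    if t['minimum'] is None:
        t['minimum'] = time
    if time > t['maximum']:
        t['maximum'] = time
    elif time < t['minimum']:
        t['minimum'] = time
    t['summary'] += time
    t['count'] += 1

for value in (120.0, 80.0, 310.0):
    accumulate(timings['resp_time'], value)

t = timings['resp_time']
print(t['minimum'], t['maximum'], t['summary'] / t['count'])  # 80.0 310.0 170.0
```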
-    def _get_data_detailed_response_codes(self, code):
+    def find_regex(self, last_line):
         """
-        :param code: str: CODE from parsed line. Ex.: '202, '499'
-        :return:
-        Calls add_new_dimension method If the value is found for the first time
+        :param last_line: str: literally last line from log file
+        :return: tuple where:
+                [0]: dict or None: match_dict or None
+                [1]: str: error description
+        We need to find appropriate pattern for current log file
+        All logic is do a regex search through the string for all predefined patterns
+        until we find something or fail.
         """
-        if code not in self.data:
-            if self.detailed_response_aggregate:
-                chart_string_copy = self.detailed_chart[0]
-                self.detailed_chart[0] = self.add_new_dimension(code, [code, code, 'incremental'],
-                                                                chart_string_copy, 'detailed_response_codes')
-            else:
-                code_index = int(code[0]) if int(code[0]) < 6 else 6
-                chart_string_copy = self.detailed_chart[code_index]
-                chart_name = 'detailed_response_codes' + DET_RESP_AGGR[code_index]
-                self.detailed_chart[code_index] = self.add_new_dimension(code, [code, code, 'incremental'],
-                                                                         chart_string_copy, chart_name)
-        self.data[code] += 1
+        # REGEX: 1.IPv4 address 2.HTTP method 3. URL 4. Response code
+        # 5. Bytes sent 6. Response length 7. Response process time
+        default = re.compile(r'(?P<address>[\da-f.:]+)'
+                             r' -.*?"(?P<method>[A-Z]+)'
+                             r' (?P<url>[^ ]+)'
+                             r' [A-Z]+/(?P<http_version>\d\.\d)"'
+                             r' (?P<code>[1-9]\d{2})'
+                             r' (?P<bytes_sent>\d+|-)')
+
+        apache_ext_insert = re.compile(r'(?P<address>[\da-f.:]+)'
+                                       r' -.*?"(?P<method>[A-Z]+)'
+                                       r' (?P<url>[^ ]+)'
+                                       r' [A-Z]+/(?P<http_version>\d\.\d)"'
+                                       r' (?P<code>[1-9]\d{2})'
+                                       r' (?P<bytes_sent>\d+|-)'
+                                       r' (?P<resp_length>\d+)'
+                                       r' (?P<resp_time>\d+) ')
+
+        apache_ext_append = re.compile(r'(?P<address>[\da-f.:]+)'
+                                       r' -.*?"(?P<method>[A-Z]+)'
+                                       r' (?P<url>[^ ]+)'
+                                       r' [A-Z]+/(?P<http_version>\d\.\d)"'
+                                       r' (?P<code>[1-9]\d{2})'
+                                       r' (?P<bytes_sent>\d+|-)'
+                                       r' .*?'
+                                       r' (?P<resp_length>\d+)'
+                                       r' (?P<resp_time>\d+)'
+                                       r'(?: |$)')
+
+        nginx_ext_insert = re.compile(r'(?P<address>[\da-f.:]+)'
+                                      r' -.*?"(?P<method>[A-Z]+)'
+                                      r' (?P<url>[^ ]+)'
+                                      r' [A-Z]+/(?P<http_version>\d\.\d)"'
+                                      r' (?P<code>[1-9]\d{2})'
+                                      r' (?P<bytes_sent>\d+)'
+                                      r' (?P<resp_length>\d+)'
+                                      r' (?P<resp_time>\d+\.\d+) ')
+
+        nginx_ext2_insert = re.compile(r'(?P<address>[\da-f.:]+)'
+                                       r' -.*?"(?P<method>[A-Z]+)'
+                                       r' (?P<url>[^ ]+)'
+                                       r' [A-Z]+/(?P<http_version>\d\.\d)"'
+                                       r' (?P<code>[1-9]\d{2})'
+                                       r' (?P<bytes_sent>\d+)'
+                                       r' (?P<resp_length>\d+)'
+                                       r' (?P<resp_time>\d+\.\d+)'
+                                       r' (?P<resp_time_upstream>[\d.-]+) ')
+
+        nginx_ext_append = re.compile(r'(?P<address>[\da-f.:]+)'
+                                      r' -.*?"(?P<method>[A-Z]+)'
+                                      r' (?P<url>[^ ]+)'
+                                      r' [A-Z]+/(?P<http_version>\d\.\d)"'
+                                      r' (?P<code>[1-9]\d{2})'
+                                      r' (?P<bytes_sent>\d+)'
+                                      r' .*?'
+                                      r' (?P<resp_length>\d+)'
+                                      r' (?P<resp_time>\d+\.\d+)')
+
+        def func_usec(time):
+            return time
+
+        def func_sec(time):
+            return time * 1000000
+
+        r_regex = [apache_ext_insert, apache_ext_append,
+                   nginx_ext2_insert, nginx_ext_insert, nginx_ext_append,
+                   default]
+        r_function = [func_usec, func_usec, func_sec, func_sec, func_sec, func_usec]
+        regex_function = zip(r_regex, r_function)
+
+        match_dict = dict()
+        for regex, func in regex_function:
+            match = regex.search(last_line)
+            if match:
+                self.storage['regex'] = regex
+                self.storage['func_resp_time'] = func
+                match_dict = match.groupdict()
+                break
+
+        return find_regex_return(match_dict=match_dict or None,
+                                 msg='Unknown log format. You need to use "custom_log_format" feature.')
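When none of the predefined regexes match, a job can supply `custom_log_format` with named groups; `find_regex_custom()` below insists on the mandatory groups and validates their shape. A hedged example of such a configuration (the pattern itself is illustrative, not one of the module's built-ins):

```python
custom_log_format = {
    # mandatory named groups: address, method, url, code, bytes_sent
    'pattern': r'(?P<address>[\da-f.:]+) (?P<method>[A-Z]+) (?P<url>[^ ]+)'
               r' (?P<code>[1-9]\d{2}) (?P<bytes_sent>\d+|-) (?P<resp_time>\d+)',
    'time_multiplier': 1,  # resp_time is already in microseconds
}
```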
-    def _get_data_http_method(self, method):
+    def find_regex_custom(self, last_line):
         """
-        :param method: str: METHOD from parsed line. Ex.: 'GET', 'POST'
-        :return:
-        Calls add_new_dimension method If the value is found for the first time
+        :param last_line: str: literally last line from log file
+        :return: tuple where:
+                [0]: dict or None: match_dict or None
+                [1]: str: error description
+
+        We are here only if "custom_log_format" is in logs. We need to make sure:
+        1. "custom_log_format" is a dict
+        2. "pattern" in "custom_log_format" and pattern is <str> instance
+        3. if "time_multiplier" is in "custom_log_format" it must be <int> instance
+
+        If all parameters is ok we need to make sure:
+        1. Pattern search is success
+        2. Pattern search contains named subgroups (?P<subgroup_name>) (= "match_dict")
+
+        If pattern search is success we need to make sure:
+        1. All mandatory keys ['address', 'code', 'bytes_sent', 'method', 'url'] are in "match_dict"
+
+        If this is True we need to make sure:
+        1. All mandatory key values from "match_dict" have the correct format
+           ("code" is integer, "method" is uppercase word, etc)
+
+        If non mandatory keys in "match_dict" we need to make sure:
+        1. All non mandatory key values from match_dict ['resp_length', 'resp_time'] have the correct format
+           ("resp_length" is integer or "-", "resp_time" is integer or float)
+        """
-        if method not in self.data:
-            chart_string_copy = self.http_method_chart
-            self.http_method_chart = self.add_new_dimension(method, [method, method, 'incremental'],
-                                                            chart_string_copy, 'http_method')
-        self.data[method] += 1
+        if not hasattr(self.conf.get('custom_log_format'), 'keys'):
+            return find_regex_return(msg='Custom log: "custom_log_format" is not a <dict>')
+
+        pattern = self.conf.get('custom_log_format', dict()).get('pattern')
+        if not (pattern and isinstance(pattern, str)):
+            return find_regex_return(msg='Custom log: "pattern" option is not specified or type is not <str>')
+
+        resp_time_func = self.conf.get('custom_log_format', dict()).get('time_multiplier') or 0
+
+        if not isinstance(resp_time_func, int):
+            return find_regex_return(msg='Custom log: "time_multiplier" is not an integer')
+
+        try:
+            regex = re.compile(pattern)
+        except re.error as error:
+            return find_regex_return(msg='Pattern compile error: %s' % str(error))
+        match = regex.search(last_line)
+        if not match:
+            return find_regex_return(msg='Custom log: pattern search FAILED')
 
-    def _get_data_http_version(self, http_version):
+        match_dict = match.groupdict() or None
+        if match_dict is None:
+            return find_regex_return(msg='Custom log: search OK but contains no named subgroups'
+                                         ' (you need to use ?P<subgroup_name>)')
+        mandatory_dict = {'address': r'[\da-f.:]+',
+                          'code': r'[1-9]\d{2}',
+                          'method': r'[A-Z]+',
+                          'bytes_sent': r'\d+|-'}
+        optional_dict = {'resp_length': r'\d+',
+                         'resp_time': r'[\d.]+',
+                         'resp_time_upstream': r'[\d.-]+',
+                         'http_version': r'\d\.\d'}
+
+        mandatory_values = set(mandatory_dict) - set(match_dict)
+        if mandatory_values:
+            return find_regex_return(msg='Custom log: search OK but some mandatory keys (%s) are missing'
+                                         % list(mandatory_values))
+        for key in mandatory_dict:
+            if not re.search(mandatory_dict[key], match_dict[key]):
+                return find_regex_return(msg='Custom log: can\'t parse "%s": %s'
+                                             % (key, match_dict[key]))
+
+        optional_values = set(optional_dict) & set(match_dict)
+        for key in optional_values:
+            if not re.search(optional_dict[key], match_dict[key]):
+                return find_regex_return(msg='Custom log: can\'t parse "%s": %s'
+                                             % (key, match_dict[key]))
+
+        dot_in_time = '.' in match_dict.get('resp_time', '')
+        if dot_in_time:
+            self.storage['func_resp_time'] = lambda time: time * (resp_time_func or 1000000)
+        else:
+            self.storage['func_resp_time'] = lambda time: time * (resp_time_func or 1)
+
+        self.storage['regex'] = regex
+        return find_regex_return(match_dict=match_dict)
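The unit heuristic at the end of `find_regex_custom()`: if the sampled `resp_time` contains a dot it is assumed to be in seconds (nginx style) and is scaled by `time_multiplier`, or by 1,000,000 when unset, to microseconds; otherwise it is treated as already being integer microseconds (apache style). Worked through with an assumed unset multiplier:

```python
resp_time_func = 0   # the job's time_multiplier; 0 when unset
resp_time = '0.043'  # nginx-style seconds
if '.' in resp_time:
    func_resp_time = lambda t: t * (resp_time_func or 1000000)  # seconds -> microseconds
else:
    func_resp_time = lambda t: t * (resp_time_func or 1)        # already microseconds
print(func_resp_time(float(resp_time)))  # 43000.0
```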
+
+    def get_data_per_response_codes_detailed(self, code):
         """
-        :param http_version: str: METHOD from parsed line. Ex.: '1.1', '1.0'
+        :param code: str: CODE from parsed line. Ex.: '202, '499'
         :return:
         Calls add_new_dimension method If the value is found for the first time
         """
-        http_version_dim_id = http_version.replace('.', '_')
-        if http_version_dim_id not in self.data:
-            chart_string_copy = self.http_version_chart
-            self.http_version_chart = self.add_new_dimension(http_version_dim_id,
-                                                             [http_version_dim_id, http_version, 'incremental'],
-                                                             chart_string_copy, 'http_version')
-        self.data[http_version_dim_id] += 1
-
-    def _get_data_per_url(self, url):
+        if code not in self.data:
+            if self.conf.get('detailed_response_aggregate', True):
+                self.add_new_dimension(dimension_id=code,
+                                       chart_key='detailed_response_codes')
+            else:
+                code_index = int(code[0]) if int(code[0]) < 6 else 6
+                chart_key = 'detailed_response_codes' + DET_RESP_AGGR[code_index]
+                self.add_new_dimension(dimension_id=code,
+                                       chart_key=chart_key)
+        self.data[code] += 1
+
+    def get_data_per_pattern(self, row, other, pattern):
         """
-        :param url: str: URL from parsed line
+        :param row: str:
+        :param other: str:
+        :param pattern: named tuple: (['pattern_description', 'regular expression'])
         :return:
         Scan through string looking for the first location where patterns produce a match for all user
        defined patterns
         """
         match = None
-        for elem in self.url_pattern:
-            if elem.pattern.search(url):
+        for elem in pattern:
+            if elem.func(row):
                 self.data[elem.description] += 1
                 match = True
                 break
         if not match:
-            self.data['rpu_other'] += 1
+            self.data[other] += 1
 
-    def _get_data_statuses(self, code):
+    def get_data_per_statuses(self, code):
         """
         :param code: str: response status code. Ex.: '202', '499'
         :return:
@@ -590,23 +776,209 @@ class Service(LogService):
             self.data['other_requests'] += 1
 
 
+class ApacheCache:
+    def __init__(self, configuration):
+        self.conf = configuration
+        self.order = ORDER_APACHE_CACHE
+        self.definitions = CHARTS_APACHE_CACHE
+
+    @staticmethod
+    def check():
+        return True
+
+    @staticmethod
+    def get_data(raw_data=None):
+        data = dict(hit=0, miss=0, other=0)
+        if not raw_data:
+            return None if raw_data is None else data
+
+        for line in raw_data:
+            if 'cache hit' in line:
+                data['hit'] += 1
+            elif 'cache miss' in line:
+                data['miss'] += 1
+            else:
+                data['other'] += 1
+        return data
+
+
+class Squid(Mixin):
+    def __init__(self, configuration):
+        self.conf = configuration
+        self.order = ORDER_SQUID
+        self.definitions = CHARTS_SQUID
+        self.pre_filter = check_patterns('filter', self.conf.get('filter'))
+        self.storage = dict()
+        self.data = {'duration_max': 0, 'duration_avg': 0, 'duration_min': 0, 'bytes': 0,
+                     '0xx': 0, '1xx': 0, '2xx': 0, '3xx': 0, '4xx': 0, '5xx': 0,
+                     'other': 0, 'unmatched': 0, 'unique_ipv4': 0, 'unique_ipv6': 0,
+                     'unique_tot_ipv4': 0, 'unique_tot_ipv6': 0, 'successful_requests': 0,
+                     'redirects': 0, 'bad_requests': 0, 'server_errors': 0, 'other_requests': 0
+                     }
+
+    def check(self):
+        last_line = self.get_last_line()
+        if not last_line:
+            return False
+        self.storage['unique_all_time'] = list()
+        self.storage['regex'] = re.compile(r'[0-9.]+\s+(?P<duration>[0-9]+)'
+                                           r' (?P<client_address>[\da-f.:]+)'
+                                           r' (?P<squid_code>[A-Z_]+)/'
+                                           r'(?P<http_code>[0-9]+)'
+                                           r' (?P<bytes>[0-9]+)'
+                                           r' (?P<method>[A-Z_]+)'
+                                           r' (?P<url>[^ ]+)'
+                                           r' (?P<user>[^ ]+)'
+                                           r' (?P<hier_code>[A-Z_]+)/[\da-f.:-]+'
+                                           r' (?P<mime_type>[^\n]+)')
+
+        match = self.storage['regex'].search(last_line)
+        if not match:
+            self.error('Regex not matches (%s)' % self.storage['regex'].pattern)
+            return False
+        self.storage['dynamic'] = {
+            'http_code': {
+                'chart': 'squid_detailed_response_codes',
+                'func_dim_id': None,
+                'func_dim': None},
+            'hier_code': {
+                'chart': 'squid_hier_code',
+                'func_dim_id': None,
+                'func_dim': lambda v: v.replace('HIER_', '')},
+            'method': {
+                'chart': 'squid_method',
+                'func_dim_id': None,
+                'func_dim': None},
+            'mime_type': {
+                'chart': 'squid_mime_type',
+                'func_dim_id': lambda v: v.split('/')[0],
+                'func_dim': None}}
+        return True
+
+    def get_data(self, raw_data=None):
+        if not raw_data:
+            return None if raw_data is None else self.data
+
+        filtered_data = self.filter_data(raw_data=raw_data)
+
+        unique_ip = set()
+        timings = defaultdict(lambda: dict(minimum=None, maximum=0, summary=0, count=0))
+
+        for row in filtered_data:
+            match = self.storage['regex'].search(row)
+            if match:
+                match = match.groupdict()
+                if match['duration'] != '0':
+                    get_timings(timings=timings['duration'], time=float(match['duration']) * 1000)
+                try:
+                    self.data[match['http_code'][0] + 'xx'] += 1
+                except KeyError:
+                    self.data['other'] += 1
+
+                self.get_data_per_statuses(match['http_code'])
+
+                self.get_data_per_squid_code(match['squid_code'])
+
+                self.data['bytes'] += int(match['bytes'])
+
+                proto = 'ipv4' if '.' in match['client_address'] else 'ipv6'
+                # unique clients ips
+                if address_not_in_pool(pool=self.storage['unique_all_time'],
+                                       address=match['client_address'],
+                                       pool_size=self.data['unique_tot_ipv4'] + self.data['unique_tot_ipv6']):
+                    self.data['unique_tot_' + proto] += 1
+
+                if match['client_address'] not in unique_ip:
+                    self.data['unique_' + proto] += 1
+                    unique_ip.add(match['client_address'])
+
+                for key, values in self.storage['dynamic'].items():
+                    if match[key] == '-':
+                        continue
+                    dimension_id = values['func_dim_id'](match[key]) if values['func_dim_id'] else match[key]
+                    if dimension_id not in self.data:
+                        dimension = values['func_dim'](match[key]) if values['func_dim'] else dimension_id
+                        self.add_new_dimension(dimension_id=dimension_id,
+                                               chart_key=values['chart'],
+                                               dimension=dimension)
+                    self.data[dimension_id] += 1
+            else:
+                self.data['unmatched'] += 1
+
+        for elem in timings:
+            self.data[elem + '_min'] += timings[elem]['minimum']
+            self.data[elem + '_avg'] += timings[elem]['summary'] / timings[elem]['count']
+            self.data[elem + '_max'] += timings[elem]['maximum']
+        return self.data
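The squid parser expects the native `access.log` format (elapsed time, client, squid-code/status, bytes, method, URL, user, hierarchy/peer, MIME type). A quick check of the regex above against a plausible sample line (the line itself is made up):

```python
import re

squid = re.compile(r'[0-9.]+\s+(?P<duration>[0-9]+)'
                   r' (?P<client_address>[\da-f.:]+)'
                   r' (?P<squid_code>[A-Z_]+)/'
                   r'(?P<http_code>[0-9]+)'
                   r' (?P<bytes>[0-9]+)'
                   r' (?P<method>[A-Z_]+)'
                   r' (?P<url>[^ ]+)'
                   r' (?P<user>[^ ]+)'
                   r' (?P<hier_code>[A-Z_]+)/[\da-f.:-]+'
                   r' (?P<mime_type>[^\n]+)')

line = '1486526931.792    1 127.0.0.1 TCP_HIT/200 537 GET http://example.com/ - HIER_NONE/- text/html'
print(squid.search(line).groupdict()['squid_code'])  # TCP_HIT
```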
+
+    def get_data_per_statuses(self, code):
+        """
+        :param code: str: response status code. Ex.: '202', '499'
+        :return:
+        """
+        code_class = code[0]
+        if code_class == '2' or code == '304' or code_class == '1' or code == '000':
+            self.data['successful_requests'] += 1
+        elif code_class == '3':
+            self.data['redirects'] += 1
+        elif code_class == '4':
+            self.data['bad_requests'] += 1
+        elif code_class == '5' or code_class == '6':
+            self.data['server_errors'] += 1
+        else:
+            self.data['other_requests'] += 1
+
+    def get_data_per_squid_code(self, code):
+        """
+        :param code: str: squid response code. Ex.: 'TCP_MISS', 'TCP_MISS_ABORTED'
+        :return:
+        """
+        if code not in self.data:
+            self.add_new_dimension(dimension_id=code, chart_key='squid_code')
+        self.data[code] += 1
+        if '_' not in code:
+            return
+        for tag in code.split('_'):
+            try:
+                chart_key = SQUID_CODES[tag]
+            except KeyError:
+                continue
+            if tag not in self.data:
+                self.add_new_dimension(dimension_id=tag, chart_key=chart_key)
+            self.data[tag] += 1
+
+
+def get_timings(timings, time):
+    """
+    :param timings:
+    :param time:
+    :return:
+    """
+    if timings['minimum'] is None:
+        timings['minimum'] = time
+    if time > timings['maximum']:
+        timings['maximum'] = time
+    elif time < timings['minimum']:
+        timings['minimum'] = time
+    timings['summary'] += time
+    timings['count'] += 1
+
+
 def address_not_in_pool(pool, address, pool_size):
     """
     :param pool: list of ip addresses
     :param address: ip address
     :param pool_size: current pool size
-    :return: True if address not in pool. False if address in pool.
+    :return: True if address not in pool. False otherwise.
     """
     index = bisect.bisect_left(pool, address)
     if index < pool_size:
         if pool[index] == address:
             return False
-        else:
-            bisect.insort_left(pool, address)
-            return True
-    else:
         bisect.insort_left(pool, address)
         return True
+    bisect.insort_left(pool, address)
+    return True
 
 
 def find_regex_return(match_dict=None, msg='Generic error message'):
@@ -618,36 +990,53 @@ def find_regex_return(match_dict=None, msg='Generic error message'):
     return match_dict, msg
 
 
-def check_req_per_url_pattern(string, url_pattern):
+def check_patterns(string, dimension_regex_dict):
     """
     :param string: str:
-    :param url_pattern: dict: ex. {'dim1': 'pattern1>', 'dim2': '<pattern2>'}
+    :param dimension_regex_dict: dict: ex. {'dim1': '<pattern1>', 'dim2': '<pattern2>'}
     :return: list of named tuples or None:
     We need to make sure all patterns are valid regular expressions
    """
-    if not hasattr(url_pattern, 'keys'):
+    if not hasattr(dimension_regex_dict, 'keys'):
         return None
 
     result = list()
 
-    def is_valid_pattern(pattern):
+    def valid_pattern(pattern):
         """
         :param pattern: str
         :return: re.compile(pattern) or None
         """
         if not isinstance(pattern, str):
             return False
-        else:
-            try:
-                compile_pattern = re.compile(pattern)
-            except re.error:
-                return False
-            else:
-                return compile_pattern
+        try:
+            return re.compile(pattern)
+        except re.error:
+            return False
 
-    for dimension, regex in url_pattern.items():
-        valid_pattern = is_valid_pattern(regex)
-        if isinstance(dimension, str) and valid_pattern:
-            result.append(NAMED_URL_PATTERN(description='_'.join([string, dimension]), pattern=valid_pattern))
+    def func_search(pattern):
+        def closure(v):
+            return pattern.search(v)
+
+        return closure
 
+    for dimension, regex in dimension_regex_dict.items():
+        valid = valid_pattern(regex)
+        if isinstance(dimension, str) and valid_pattern:
+            func = func_search(valid)
+            result.append(NAMED_PATTERN(description='_'.join([string, dimension]),
+                                        func=func))
     return result or None
+
+
+def find_job_name(override_name, name):
+    """
+    :param override_name: str: 'name' var from configuration file
    :param name: str: 'job_name' from configuration file
+    :return: str: new job name
+    We need this for dynamic charts. Actually the same logic as in python.d.plugin.
+    """
+    add_to_name = override_name or name
+    if add_to_name:
+        return '_'.join(['web_log', re.sub('\s+', '_', add_to_name)])
+    return 'web_log'
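`address_not_in_pool()` keeps the all-time client IP pool as a sorted list, so membership is an O(log n) `bisect` probe and insertion preserves the order. The same pattern in isolation:

```python
import bisect

pool = []
for address in ['10.0.0.2', '10.0.0.1', '10.0.0.2']:
    index = bisect.bisect_left(pool, address)
    if index < len(pool) and pool[index] == address:
        continue  # already counted
    bisect.insort_left(pool, address)  # new unique client, keep the list sorted

print(pool)  # ['10.0.0.1', '10.0.0.2']
```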