NetdataDHCP: libexec_netdata_python_d_dhcpd.chart.py

File libexec_netdata_python_d_dhcpd.chart.py, 7.8 KB (added by mirek@…, 4 years ago)

libexec/netdata/python.d/dhcpd.chart.py

Listing (the leading numbers on each line below are part of the Trac rendering, not the source):
1# -*- coding: utf-8 -*-
2# Description: dhcpd log netdata python.d module
3# Author: betelgeuse
4
5import bisect
6import re
7import os
8import sys
9import warnings
10
11from collections import namedtuple, defaultdict
12from copy import deepcopy
13
14try:
15 from itertools import filterfalse
16except ImportError:
17 from itertools import ifilter as filter
18 from itertools import ifilterfalse as filterfalse
19
20from bases.collection import read_last_line
21from bases.FrameworkServices.LogService import LogService
22
23
# The nine DHCP message types charted as individual dimensions.
_MESSAGE_TYPES = ['DISCOVER', 'OFFER', 'REQUEST', 'ACK', 'NAK',
                  'DECLINE', 'INFORM', 'LEASEQUERY', 'RELEASE']

# Chart display order for this module.
ORDER_DHCP = ['message_types']

# One stacked chart; every dimension is 'incremental' (counters, never reset).
CHARTS_DHCP = {
    'message_types': {
        'options': [None, 'Message types', 'messages/s', 'messages', 'dhcp.message_codes', 'stacked'],
        'lines': [[message_type, None, 'incremental'] for message_type in _MESSAGE_TYPES],
    }
}

# Named tuple pairing a filter description ('filter_include'/'filter_exclude')
# with its match function.
NAMED_PATTERN = namedtuple('PATTERN', ['description', 'func'])

# Aggregate dimension names: '' (total), each message type, and 'OTHER'.
DET_RESP_AGGR = [''] + _MESSAGE_TYPES + ['OTHER']

# Extracts the message type from lines such as "DHCPDISCOVER from ...".
REQUEST_REGEX = re.compile(r'DHCP(?P<message_type>[A-Z]+) ')
47
48
class Service(LogService):
    """Log-tailing netdata service; parsing is delegated to a type-specific job."""

    def __init__(self, configuration=None, name=None):
        """
        :param configuration: dict: job configuration from the .conf file
        :param name: str: job name
        """
        LogService.__init__(self, configuration=configuration, name=name)
        self.configuration = configuration
        self.log_path = self.configuration.get('path')
        self.job = None  # set by check() once a parser job is validated

    def check(self):
        """
        :return: bool

        1. "log_path" is specified in the module configuration file
        2. "log_path" must be readable by netdata user and must exist
        3. "log_path' must not be empty. We need at least 1 line to find appropriate pattern to parse
        4. other checks depends on log "type"
        """
        supported = dict(dhcpd=Dhcp)
        log_type = self.configuration.get('type', 'dhcpd')

        # Guard: we only know how to parse the types listed in `supported`.
        if log_type not in supported:
            self.error("bad log type {log_type}. Supported types: {types}".format(log_type=log_type,
                                                                                  types=supported.keys()))
            return False

        # Guard: a path must be configured at all.
        if not self.log_path:
            self.error('log path is not specified')
            return False

        # Guard: the (possibly rotated) file must exist and be readable.
        readable = self._find_recent_log_file() and os.access(self.log_path, os.R_OK)
        if not readable:
            self.error('{log_file} not readable or not exist'.format(log_file=self.log_path))
            return False

        # Guard: an empty file gives us no line to detect the format from.
        if os.path.getsize(self.log_path) == 0:
            self.error('{log_file} is empty'.format(log_file=self.log_path))
            return False

        # Build the parser job and adopt its charts if its own check passes.
        self.job = supported[log_type](self)
        if not self.job.check():
            return False
        self.order = self.job.order
        self.definitions = self.job.definitions
        return True

    def _get_data(self):
        """Read fresh log lines and hand them to the job for parsing."""
        return self.job.get_data(self._get_raw_data())
99
100
class Dhcp:
    """Parser job for dhcpd log files.

    Counts DHCP message types (DISCOVER, OFFER, ...) found in new log lines
    and exposes them as incremental chart dimensions.
    """

    def __init__(self, service):
        # Owning Service instance; unknown attribute lookups (error, info,
        # log_path, configuration, ...) fall through to it via __getattr__.
        self.service = service
        self.order = ORDER_DHCP[:]
        self.definitions = deepcopy(CHARTS_DHCP)
        # Optional user-configured include/exclude line filters.
        self.pre_filter = check_patterns('filter', self.configuration.get('filter'))
        # Holds the regex selected by find_regex() under the 'regex' key.
        self.storage = dict()
        # Running counters; chart dimensions are 'incremental', so never reset.
        self.data = {
            'DISCOVER': 0, 'OFFER': 0, 'REQUEST': 0, 'ACK': 0, 'NAK': 0,
            'DECLINE': 0, 'INFORM': 0, 'LEASEQUERY': 0, 'RELEASE': 0,
            'unmatched': 0, 'OTHER': 0,
        }

    def __getattr__(self, item):
        # Delegate everything not defined here to the wrapped service.
        return getattr(self.service, item)

    def check(self):
        """
        :return: bool
        Verify the last log line matches a known pattern before collecting.
        """
        last_line = read_last_line(self.log_path)
        if not last_line:
            return False
        match_dict, error = self.find_regex(last_line)

        # "match_dict" is None if there are any problems
        if match_dict is None:
            self.error(error)
            return False

        self.info('Collected data: %s' % list(match_dict.keys()))
        return True

    def get_data(self, raw_data=None):
        """
        Parses new log lines
        :return: dict OR None
        None if _get_raw_data method fails.
        In all other cases - dict.
        """
        if not raw_data:
            # None means read failure; an empty list just means "no new lines".
            return None if raw_data is None else self.data

        filtered_data = filter_data(raw_data=raw_data, pre_filter=self.pre_filter)

        for line in filtered_data:
            match = self.storage['regex'].search(line)
            if match:
                mtype = match.groupdict()['message_type']
                try:
                    self.data[mtype] += 1
                except KeyError:
                    # Message type we do not chart individually (e.g. DHCPEXPIRE).
                    # FIX: the original passed the KeyError object itself to
                    # warnings.warn(); pass an informative message string instead.
                    warnings.warn('unknown DHCP message type: {0}'.format(mtype))
                    self.data['OTHER'] += 1
            else:
                self.data['unmatched'] += 1

        return self.data

    def find_regex(self, last_line):
        """
        :param last_line: str: literally last line from log file
        :return: tuple where:
        [0]: dict or None: match_dict or None
        [1]: str: error description
        We need to find appropriate pattern for current log file
        All logic is do a regex search through the string for all predefined patterns
        until we find something or fail.
        """
        # FIX: reuse the module-level REQUEST_REGEX instead of recompiling an
        # identical local pattern, and drop the dead func_usec/func_sec helpers
        # whose list was longer than the single-regex list they were zipped with.
        match_dict = dict()
        for regex in (REQUEST_REGEX,):
            match = regex.search(last_line)
            if match:
                self.storage['regex'] = regex
                match_dict = match.groupdict()
                break

        return find_regex_return(match_dict=match_dict or None,
                                 msg='Unknown log format. You need to use "custom_log_format" feature.')
186
def find_regex_return(match_dict=None, msg='Generic error message'):
    """
    Pack a regex-search result and an error description into one tuple.

    :param match_dict: dict: re.search.groupdict() or None
    :param msg: str: error description
    :return: tuple: (match_dict, msg)
    """
    result = (match_dict, msg)
    return result
194
195
def check_patterns(string, dimension_regex_dict):
    """
    :param string: str: prefix for the pattern description (e.g. 'filter')
    :param dimension_regex_dict: dict: ex. {'dim1': '<pattern1>', 'dim2': '<pattern2>'}
    :return: list of named tuples or None:
    We need to make sure all patterns are valid regular expressions
    """
    if not hasattr(dimension_regex_dict, 'keys'):
        return None

    result = list()

    def valid_pattern(pattern):
        """
        :param pattern: str
        :return: re.compile(pattern) or False
        """
        if not isinstance(pattern, str):
            return False
        try:
            return re.compile(pattern)
        except re.error:
            return False

    def func_search(pattern):
        def closure(v):
            return pattern.search(v)

        return closure

    for dimension, regex in dimension_regex_dict.items():
        valid = valid_pattern(regex)
        # FIX: the original tested `valid_pattern` — the function object,
        # which is always truthy — so invalid regexes slipped through and
        # func_search(False) produced a closure that crashes when called.
        # Test the compiled result instead.
        if isinstance(dimension, str) and valid:
            func = func_search(valid)
            result.append(NAMED_PATTERN(description='_'.join([string, dimension]),
                                        func=func))
    return result or None
233
234
def filter_data(raw_data, pre_filter):
    """
    Apply the configured include/exclude patterns to raw log lines.

    :param raw_data: iterable of str: new log lines
    :param pre_filter: list of NAMED_PATTERN or None
    :return: iterable of str: the lines surviving every filter
    """
    if not pre_filter:
        return raw_data

    # Map each filter description to the function that applies it; any other
    # description leaves the data untouched, as in the original elif chain.
    dispatch = {
        'filter_include': filter,
        'filter_exclude': filterfalse,
    }
    result = raw_data
    for pattern in pre_filter:
        apply_filter = dispatch.get(pattern.description)
        if apply_filter is not None:
            result = apply_filter(pattern.func, result)
    return result