Changes between Version 2 and Version 3 of NetdataDHCP


Ignore:
Timestamp:
Nov 4, 2019, 10:58:50 AM (4 years ago)
Author:
mirek@…
Comment:

--

Legend:

Unmodified
Added
Removed
Modified
  • NetdataDHCP

    v2 v3  
    1 {{{etc/health.d/dhcpd.conf}}}
    2 
    3 {{{
    4 template: last_collected_secs
    5       on: dhcpd_log.message_types
    6 families: *
    7     calc: $now - $last_collected_t
    8    units: seconds ago
    9    every: 10s
    10     warn: $this > (($status >= $WARNING)  ? ($update_every) : ( 5 * $update_every))
    11     crit: $this > (($status == $CRITICAL) ? ($update_every) : (60 * $update_every))
    12    delay: down 5m multiplier 1.5 max 1h
    13     info: number of seconds since the last successful data collection
    14       to: webmaster
    15 
    16    alarm: 1m_discovers
    17       on: dhcpd_log.message_types
    18   lookup: average -5m unaligned of DISCOVER
    19    hosts: *
    20    units: messages/s
    21    every: 10s
    22           warn: $this > 40
    23                 crit: $this > 50
    24     info: the rate of DHCPDISCOVER messages over the last 5 minutes
    25 
    26    alarm: 1m_requests
    27       on: dhcpd_log.message_types
    28   lookup: average -15m unaligned of REQUEST
    29    hosts: *
    30    units: messages/s
    31    every: 10s
    32           warn: $this > 40
    33                 crit: $this < 1
    34     info: the rate of DHCPREQUEST messages over the last 15 minutes
    35 }}}
    36 
    37 {{{etc/python.d/dhcpd.conf}}}
    38 
    39 {{{
    40 name: DHCP
    41 
    42 # freebsd
    43 isc_dhcpd4:
    44   name: 'log'
    45   path: '/var/log/dhcpd.log'
    46 }}}
    47 
    48 {{{libexec/netdata/python.d/dhcpd.chart.py}}}
    49 
    50 {{{#!python
    51 # -*- coding: utf-8 -*-
    52 # Description: dhcpd log netdata python.d module
    53 # Author: betelgeuse
    54 
    55 import bisect
    56 import re
    57 import os
    58 import sys
    59 import warnings
    60 
    61 from collections import namedtuple, defaultdict
    62 from copy import deepcopy
    63 
    64 try:
    65     from itertools import filterfalse
    66 except ImportError:
    67     from itertools import ifilter as filter
    68     from itertools import ifilterfalse as filterfalse
    69 
    70 from bases.collection import read_last_line
    71 from bases.FrameworkServices.LogService import LogService
    72 
    73 
# Chart display order for the dhcpd job (a single chart).
ORDER_DHCP = ['message_types']

# Chart definitions: one stacked chart counting DHCP message types per second.
# Dimension names match the <TYPE> captured by REQUEST_REGEX below.
CHARTS_DHCP = {
    'message_types': {
        'options': [None, 'Message types', 'messages/s', 'messages', 'dhcp.message_codes', 'stacked'],
        'lines': [
            ['DISCOVER', None, 'incremental'],
            ['OFFER', None, 'incremental'],
            ['REQUEST', None, 'incremental'],
            ['ACK', None, 'incremental'],
            ['NAK', None, 'incremental'],
            ['DECLINE', None, 'incremental'],
            ['INFORM', None, 'incremental'],
            ['LEASEQUERY', None, 'incremental'],
            ['RELEASE', None, 'incremental'],
        ]}
}

# (description, func) pair describing one pre-filter rule; built by check_patterns().
NAMED_PATTERN = namedtuple('PATTERN', ['description', 'func'])

# NOTE(review): not referenced anywhere in this module — looks like leftover from
# the web_log collector this file was adapted from; confirm before removing.
DET_RESP_AGGR = ['', 'DISCOVER', 'OFFER', 'REQUEST', 'ACK', 'NAK', 'DECLINE', 'INFORM', 'LEASEQUERY', 'RELEASE', 'OTHER']

# Matches "DHCP<TYPE> " in a log line and captures <TYPE> (e.g. DISCOVER, ACK)
# into the named group "message_type".
REQUEST_REGEX = re.compile(r'DHCP(?P<message_type>[A-Z]+) ')
    97 
    98 
    99 class Service(LogService):
    100     def __init__(self, configuration=None, name=None):
    101         """
    102         :param configuration:
    103         :param name:
    104         """
    105         LogService.__init__(self, configuration=configuration, name=name)
    106         self.configuration = configuration
    107         self.log_path = self.configuration.get('path')
    108         self.job = None
    109 
    110     def check(self):
    111         """
    112         :return: bool
    113 
    114         1. "log_path" is specified in the module configuration file
    115         2. "log_path" must be readable by netdata user and must exist
    116         3. "log_path' must not be empty. We need at least 1 line to find appropriate pattern to parse
    117         4. other checks depends on log "type"
    118         """
    119 
    120         log_type = self.configuration.get('type', 'dhcpd')
    121         log_types = dict(dhcpd=Dhcp)
    122 
    123         if log_type not in log_types:
    124             self.error("bad log type {log_type}. Supported types: {types}".format(log_type=log_type,
    125                                                                                   types=log_types.keys()))
    126             return False
    127 
    128         if not self.log_path:
    129             self.error('log path is not specified')
    130             return False
    131 
    132         if not (self._find_recent_log_file() and os.access(self.log_path, os.R_OK)):
    133             self.error('{log_file} not readable or not exist'.format(log_file=self.log_path))
    134             return False
    135 
    136         if not os.path.getsize(self.log_path):
    137             self.error('{log_file} is empty'.format(log_file=self.log_path))
    138             return False
    139 
    140         self.job = log_types[log_type](self)
    141         if self.job.check():
    142             self.order = self.job.order
    143             self.definitions = self.job.definitions
    144             return True
    145         return False
    146 
    147     def _get_data(self):
    148         return self.job.get_data(self._get_raw_data())
    149 
    150 
    151 class Dhcp:
    152     def __init__(self, service):
    153         self.service = service
    154         self.order = ORDER_DHCP[:]
    155         self.definitions = deepcopy(CHARTS_DHCP)
    156         self.pre_filter = check_patterns('filter', self.configuration.get('filter'))
    157         self.storage = dict()
    158         self.data = {'DISCOVER': 0, 'OFFER': 0, 'REQUEST': 0, 'ACK': 0, 'NAK': 0, 'DECLINE': 0, 'INFORM': 0, 'LEASEQUERY': 0, 'RELEASE': 0, 'unmatched': 0, 'OTHER': 0}
    159 
    160     def __getattr__(self, item):
    161         return getattr(self.service, item)
    162 
    163     def check(self):
    164         last_line = read_last_line(self.log_path)
    165         if not last_line:
    166             return False
    167         match_dict, error = self.find_regex(last_line)
    168 
    169         # "match_dict" is None if there are any problems
    170         if match_dict is None:
    171             self.error(error)
    172             return False
    173 
    174         self.info('Collected data: %s' % list(match_dict.keys()))
    175         return True
    176 
    177     def get_data(self, raw_data=None):
    178         """
    179         Parses new log lines
    180         :return: dict OR None
    181         None if _get_raw_data method fails.
    182         In all other cases - dict.
    183         """
    184         if not raw_data:
    185             return None if raw_data is None else self.data
    186 
    187         filtered_data = filter_data(raw_data=raw_data, pre_filter=self.pre_filter)
    188 
    189         for line in filtered_data:
    190             match = self.storage['regex'].search(line)
    191             if match:
    192                 match_dict = match.groupdict()
    193                 try:
    194                     mtype = match_dict['message_type']
    195                     self.data[mtype] += 1
    196                 except KeyError as e:
    197                     warnings.warn(e)
    198                     self.data['OTHER'] += 1
    199             else:
    200                 self.data['unmatched'] += 1
    201 
    202         return self.data
    203 
    204     def find_regex(self, last_line):
    205         """
    206         :param last_line: str: literally last line from log file
    207         :return: tuple where:
    208         [0]: dict or None:  match_dict or None
    209         [1]: str: error description
    210         We need to find appropriate pattern for current log file
    211         All logic is do a regex search through the string for all predefined patterns
    212         until we find something or fail.
    213         """
    214         default = re.compile(r'DHCP(?P<message_type>[A-Z]+) ')
    215 
    216         def func_usec(time):
    217             return time
    218 
    219         def func_sec(time):
    220             return time * 1000000
    221 
    222         r_regex = [default]
    223         r_function = [func_usec, func_usec, func_sec, func_sec, func_sec, func_usec]
    224         regex_function = zip(r_regex, r_function)
    225 
    226         match_dict = dict()
    227         for regex, func in regex_function:
    228             match = regex.search(last_line)
    229             if match:
    230                 self.storage['regex'] = regex
    231                 match_dict = match.groupdict()
    232                 break
    233 
    234         return find_regex_return(match_dict=match_dict or None,
    235                                  msg='Unknown log format. You need to use "custom_log_format" feature.')
    236 
    237 def find_regex_return(match_dict=None, msg='Generic error message'):
    238     """
    239     :param match_dict: dict: re.search.groupdict() or None
    240     :param msg: str: error description
    241     :return: tuple:
    242     """
    243     return match_dict, msg
    244 
    245 
    246 def check_patterns(string, dimension_regex_dict):
    247     """
    248     :param string: str:
    249     :param dimension_regex_dict: dict: ex. {'dim1': '<pattern1>', 'dim2': '<pattern2>'}
    250     :return: list of named tuples or None:
    251      We need to make sure all patterns are valid regular expressions
    252     """
    253     if not hasattr(dimension_regex_dict, 'keys'):
    254         return None
    255 
    256     result = list()
    257 
    258     def valid_pattern(pattern):
    259         """
    260         :param pattern: str
    261         :return: re.compile(pattern) or None
    262         """
    263         if not isinstance(pattern, str):
    264             return False
    265         try:
    266             return re.compile(pattern)
    267         except re.error:
    268             return False
    269 
    270     def func_search(pattern):
    271         def closure(v):
    272             return pattern.search(v)
    273 
    274         return closure
    275 
    276     for dimension, regex in dimension_regex_dict.items():
    277         valid = valid_pattern(regex)
    278         if isinstance(dimension, str) and valid_pattern:
    279             func = func_search(valid)
    280             result.append(NAMED_PATTERN(description='_'.join([string, dimension]),
    281                                         func=func))
    282     return result or None
    283 
    284 
    285 def filter_data(raw_data, pre_filter):
    286     """
    287     :param raw_data:
    288     :param pre_filter:
    289     :return:
    290     """
    291 
    292     if not pre_filter:
    293         return raw_data
    294     filtered = raw_data
    295     for elem in pre_filter:
    296         if elem.description == 'filter_include':
    297             filtered = filter(elem.func, filtered)
    298         elif elem.description == 'filter_exclude':
    299             filtered = filterfalse(elem.func, filtered)
    300     return filtered
    301 }}}