Cleanup config.nodes_of
[check_mk.git] / cmk_base / config.py
blob 633c0de306d40b2c02500bf716bbf01cc6924b30
1 #!/usr/bin/env python
2 # -*- encoding: utf-8; py-indent-offset: 4 -*-
3 # +------------------------------------------------------------------+
4 # | ____ _ _ __ __ _ __ |
5 # | / ___| |__ ___ ___| | __ | \/ | |/ / |
6 # | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
7 # | | |___| | | | __/ (__| < | | | | . \ |
8 # | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
9 # | |
10 # | Copyright Mathias Kettner 2014 mk@mathias-kettner.de |
11 # +------------------------------------------------------------------+
13 # This file is part of Check_MK.
14 # The official homepage is at http://mathias-kettner.de/check_mk.
16 # check_mk is free software; you can redistribute it and/or modify it
17 # under the terms of the GNU General Public License as published by
18 # the Free Software Foundation in version 2. check_mk is distributed
19 # in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
20 # out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
21 # PARTICULAR PURPOSE. See the GNU General Public License for more de-
22 # tails. You should have received a copy of the GNU General Public
23 # License along with GNU Make; see the file COPYING. If not, write
24 # to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
25 # Boston, MA 02110-1301 USA.
27 from collections import OrderedDict
28 import ast
29 import copy
30 import inspect
31 import marshal
32 import numbers
33 import os
34 import py_compile
35 import struct
36 import sys
37 from typing import Set, Text, Any, Callable, Dict, List, Tuple, Union, Optional # pylint: disable=unused-import
39 import six
41 import cmk.utils.debug
42 import cmk.utils.paths
43 from cmk.utils.regex import regex, is_regex
44 import cmk.utils.translations
45 import cmk.utils.rulesets.tuple_rulesets
46 import cmk.utils.store as store
47 import cmk.utils
48 from cmk.utils.rulesets.ruleset_matcher import RulesetMatchObject
49 from cmk.utils.exceptions import MKGeneralException, MKTerminate
51 import cmk_base
52 import cmk_base.console as console
53 import cmk_base.default_config as default_config
54 import cmk_base.check_utils
55 import cmk_base.utils
56 import cmk_base.check_api_utils as check_api_utils
57 import cmk_base.cleanup
58 import cmk_base.piggyback as piggyback
59 import cmk_base.snmp_utils
60 from cmk_base.discovered_labels import DiscoveredHostLabelsStore
62 # TODO: Prefix helper functions with "_".
64 # This is mainly needed for pylint to detect all available
65 # configuration options during static analysis. The defaults
66 # are loaded later with load_default_config() again.
67 from cmk_base.default_config import * # pylint: disable=wildcard-import,unused-wildcard-import
69 service_service_levels = [] # type: ignore
70 host_service_levels = [] # type: ignore
73 class TimespecificParamList(list):
74 pass
77 def get_variable_names():
78 """Provides the list of all known configuration variables."""
79 return [k for k in default_config.__dict__ if k[0] != "_"]
82 def get_default_config():
83 """Provides a dictionary containing the Check_MK default configuration"""
84 cfg = {}
85 for key in get_variable_names():
86 value = getattr(default_config, key)
88 if isinstance(value, (dict, list)):
89 value = copy.deepcopy(value)
91 cfg[key] = value
92 return cfg
95 def load_default_config():
96 globals().update(get_default_config())
99 def register(name, default_value):
100 """Register a new configuration variable within Check_MK base."""
101 setattr(default_config, name, default_value)
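# Illustrative sketch of the registration mechanism above (the variable name
# "my_plugin_setting" is purely hypothetical):
#
#   register("my_plugin_setting", False)
#   "my_plugin_setting" in get_variable_names()    # -> True
#   get_default_config()["my_plugin_setting"]      # -> False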
104 def _add_check_variables_to_default_config():
105 """Add configuration variables registered by checks to config module"""
106 default_config.__dict__.update(get_check_variable_defaults())
109 def _clear_check_variables_from_default_config(variable_names):
110 """Remove previously registered check variables from the config module"""
111 for varname in variable_names:
112 try:
113 delattr(default_config, varname)
114 except AttributeError:
115 pass
118 # Load the user configured values of check related configuration variables
119 # into the check module to make them available during checking.
121 # In the same step we remove the check related configuration settings from the
122 # config module because they are not needed there anymore.
124 # They are also removed from the default config (in case they were present).
125 def set_check_variables_for_checks():
126 global_dict = globals()
127 cvn = check_variable_names()
129 check_variables = {}
130 for varname in cvn:
131 check_variables[varname] = global_dict.pop(varname)
133 set_check_variables(check_variables)
134 _clear_check_variables_from_default_config(cvn)
138 # .--Read Config---------------------------------------------------------.
139 # | ____ _ ____ __ _ |
140 # | | _ \ ___ __ _ __| | / ___|___ _ __ / _(_) __ _ |
141 # | | |_) / _ \/ _` |/ _` | | | / _ \| '_ \| |_| |/ _` | |
142 # | | _ < __/ (_| | (_| | | |__| (_) | | | | _| | (_| | |
143 # | |_| \_\___|\__,_|\__,_| \____\___/|_| |_|_| |_|\__, | |
144 # | |___/ |
145 # +----------------------------------------------------------------------+
146 # | Code for reading the configuration files. |
147 # '----------------------------------------------------------------------'
150 def load(with_conf_d=True, validate_hosts=True, exclude_parents_mk=False):
151 _initialize_config()
153 vars_before_config = all_nonfunction_vars()
155 _load_config(with_conf_d, exclude_parents_mk)
156 _transform_mgmt_config_vars_from_140_to_150()
157 _initialize_derived_config_variables()
159 _perform_post_config_loading_actions()
161 if validate_hosts:
162 _verify_non_duplicate_hosts()
164 # Such validation only makes sense when all checks have been loaded
165 if all_checks_loaded():
166 verify_non_invalid_variables(vars_before_config)
167 _verify_no_deprecated_check_rulesets()
169 verify_snmp_communities_type()
172 def load_packed_config():
173 """Load the configuration for the CMK helpers of CMC
175 These files are written by PackedConfig().
177     The result should be similar to load() above, with the exception that the
178     check helpers only need the check related config variables.
180     The validations which are performed during load() also do not need to be performed here.
181     """
182 PackedConfig().load()
185 def _initialize_config():
186 _add_check_variables_to_default_config()
187 load_default_config()
190 def _perform_post_config_loading_actions():
191 """These tasks must be performed after loading the Check_MK base configuration"""
192 # First cleanup things (needed for e.g. reloading the config)
193 cmk_base.config_cache.clear_all()
195 get_config_cache().initialize()
197 # If the checks have not been loaded yet, the current mode apparently
198 # does not work with checks. In that case don't load the static checks
199 # into the configuration either.
200 if any_check_loaded():
201 add_wato_static_checks_to_checks()
202 initialize_check_caches()
203 set_check_variables_for_checks()
206 def _load_config(with_conf_d, exclude_parents_mk):
207 helper_vars = {
208 "FOLDER_PATH": None,
211 global_dict = globals()
212 global_dict.update(helper_vars)
214 for _f in _get_config_file_paths(with_conf_d):
215 # During parent scan mode we must not read in old version of parents.mk!
216 if exclude_parents_mk and _f.endswith("/parents.mk"):
217 continue
219 try:
220 _hosts_before = set(all_hosts)
221 _clusters_before = set(clusters.keys())
223 # Make the config path available as a global variable to
224 # be used within the configuration file
225 if _f.startswith(cmk.utils.paths.check_mk_config_dir + "/"):
226 _file_path = _f[len(cmk.utils.paths.check_mk_config_dir) + 1:]
227 global_dict.update({
228 "FOLDER_PATH": os.path.dirname(_file_path),
230 else:
231 global_dict.update({
232 "FOLDER_PATH": None,
235 execfile(_f, global_dict, global_dict)
237 _new_hosts = set(all_hosts).difference(_hosts_before)
238 _new_clusters = set(clusters.keys()).difference(_clusters_before)
240 set_folder_paths(_new_hosts.union(_new_clusters), _f)
241 except Exception as e:
242 if cmk.utils.debug.enabled():
243 raise
244 elif sys.stderr.isatty():
245 console.error("Cannot read in configuration file %s: %s\n", _f, e)
246 sys.exit(1)
248 # Cleanup global helper vars
249 for helper_var in helper_vars:
250 del global_dict[helper_var]
253 def _transform_mgmt_config_vars_from_140_to_150():
254 #FIXME We have to transform some configuration variables from host attributes
255 # to cmk_base configuration variables because during the migration step from
256 # 1.4.0 to 1.5.0 some config variables are not known in cmk_base. These variables
257 # are 'management_protocol' and 'management_snmp_community'.
258 # Clean this up one day!
259 for hostname, attributes in host_attributes.iteritems():
260 for name, var in [
261 ('management_protocol', management_protocol),
262 ('management_snmp_community', management_snmp_credentials),
263 ]:
264 if attributes.get(name):
265 var.setdefault(hostname, attributes[name])
268 # Create list of all files to be included during configuration loading
269 def _get_config_file_paths(with_conf_d):
270 if with_conf_d:
271 list_of_files = sorted(
272 reduce(lambda a, b: a + b,
273 [["%s/%s" % (d, f)
274 for f in fs
275 if f.endswith(".mk")]
276 for d, _unused_sb, fs in os.walk(cmk.utils.paths.check_mk_config_dir)], []),
277 cmp=cmk.utils.cmp_config_paths)
278 list_of_files = [cmk.utils.paths.main_config_file] + list_of_files
279 else:
280 list_of_files = [cmk.utils.paths.main_config_file]
282 for path in [cmk.utils.paths.final_config_file, cmk.utils.paths.local_config_file]:
283 if os.path.exists(path):
284 list_of_files.append(path)
286 return list_of_files
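# Illustrative sketch of the resulting load order (paths shortened, folder names
# hypothetical; the conf.d part is ordered via cmk.utils.cmp_config_paths and
# final.mk / local.mk are only appended if they exist):
#
#   [ .../etc/check_mk/main.mk,
#     .../etc/check_mk/conf.d/wato/global.mk,
#     .../etc/check_mk/conf.d/wato/somefolder/hosts.mk,
#     ...,
#     .../etc/check_mk/final.mk,
#     .../etc/check_mk/local.mk ]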
289 def _initialize_derived_config_variables():
290 global service_service_levels, host_service_levels
291 service_service_levels = extra_service_conf.get("_ec_sl", [])
292 host_service_levels = extra_host_conf.get("_ec_sl", [])
295 def get_derived_config_variable_names():
296 """These variables are computed from other configuration variables and not configured directly.
298 The origin variable (extra_service_conf) should not be exported to the helper config. Only
299 the service levels are needed."""
300 return set(["service_service_levels", "host_service_levels"])
303 def _verify_non_duplicate_hosts():
304 duplicates = duplicate_hosts()
305 if duplicates:
306 # TODO: Raise an exception
307 console.error("Error in configuration: duplicate hosts: %s\n", ", ".join(duplicates))
308 sys.exit(3)
311 # Add WATO-configured explicit checks to (possibly empty) checks
312 # statically defined in checks.
313 def add_wato_static_checks_to_checks():
314 global checks
316 static = []
317 for entries in static_checks.values():
318 for entry in entries:
319 entry, rule_options = get_rule_options(entry)
320 if rule_options.get("disabled"):
321 continue
323 # Parameters are optional
324 if len(entry[0]) == 2:
325 checktype, item = entry[0]
326 params = None
327 else:
328 checktype, item, params = entry[0]
329 if len(entry) == 3:
330 taglist, hostlist = entry[1:3]
331 else:
332 hostlist = entry[1]
333 taglist = []
335 # Skip manual checks whose check plugin does not exist or whose
336 # check file has not been loaded
337 try:
338 check_plugin_info = check_info[checktype]
339 except KeyError:
340 continue
342 # Make sure, that for dictionary based checks
343 # at least those keys defined in the factory
344 # settings are present in the parameters
345 if isinstance(params, dict):
346 def_levels_varname = check_plugin_info.get("default_levels_variable")
347 if def_levels_varname:
348 for key, value in factory_settings.get(def_levels_varname, {}).items():
349 if key not in params:
350 params[key] = value
352 static.append((taglist, hostlist, checktype, item, params))
354 # Note: We need to reverse the order of the static_checks. This is because
355 # users assume that earlier rules have precedence over later ones. For static
356 # checks that is important if there are two rules for a host with the same
357 # combination of check type and item. When the variable 'checks' is evaluated,
358 # *later* rules have precedence. This is not consistent with the rest, but a
359 # result of this "historic implementation".
360 static.reverse()
362 # Now prepend them to the checks. This gives the 'checks' variable precedence
363 # over WATO.
364 checks = static + checks
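# Illustrative sketch of the conversion above (group name, host name and
# parameters are hypothetical): a static_checks rule like
#
#   static_checks["filesystem"] = [
#       (("df", u"/var", {"levels": (80.0, 90.0)}), [], ["myhost"]),
#   ]
#
# ends up as a legacy 'checks' entry of the form
#
#   ([], ["myhost"], "df", u"/var", {"levels": (80.0, 90.0)})
#
# with missing factory_settings keys of the check merged into the params dict.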
367 def initialize_check_caches():
368 single_host_checks = cmk_base.config_cache.get_dict("single_host_checks")
369 multi_host_checks = cmk_base.config_cache.get_list("multi_host_checks")
371 for entry in checks:
372 if len(entry) == 4 and isinstance(entry[0], str):
373 single_host_checks.setdefault(entry[0], []).append(entry)
374 else:
375 multi_host_checks.append(entry)
378 def set_folder_paths(new_hosts, filename):
379 if not filename.startswith(cmk.utils.paths.check_mk_config_dir):
380 return
382 path = filename[len(cmk.utils.paths.check_mk_config_dir):]
384 for hostname in strip_tags(new_hosts):
385 host_paths[hostname] = path
388 def verify_non_invalid_variables(vars_before_config):
389 # Check for invalid configuration variables
390 vars_after_config = all_nonfunction_vars()
391 ignored_variables = set([
392 'vars_before_config', 'parts', 'seen_hostnames', 'taggedhost', 'hostname',
393 'service_service_levels', 'host_service_levels'
394 ])
396 found_invalid = 0
397 for name in vars_after_config:
398 if name not in ignored_variables and name not in vars_before_config:
399 console.error("Invalid configuration variable '%s'\n", name)
400 found_invalid += 1
402 if found_invalid:
403 console.error("--> Found %d invalid variables\n" % found_invalid)
404 console.error("If you use own helper variables, please prefix them with _.\n")
405 sys.exit(1)
408 def verify_snmp_communities_type():
409 # Special handling for certain deprecated variables
410 if isinstance(snmp_communities, dict):
411 console.error("ERROR: snmp_communities cannot be a dict any more.\n")
412 sys.exit(1)
415 def _verify_no_deprecated_check_rulesets():
416 deprecated_rulesets = [
417 ("services", "inventory_services"),
418 ("domino_tasks", "inv_domino_tasks"),
419 ("ps", "inventory_processes"),
420 ("logwatch", "logwatch_patterns"),
422 for check_plugin_name, varname in deprecated_rulesets:
423 check_context = get_check_context(check_plugin_name)
424 if check_context[varname]:
425 console.warning(
426 "Found rules for deprecated ruleset %r. These rules are not applied "
427 "anymore. In case you still need them, you need to migrate them by hand. "
428 "Otherwise you can remove them from your configuration." % varname)
431 def all_nonfunction_vars():
432 return set(
433 [name for name, value in globals().items() if name[0] != '_' and not callable(value)])
436 class PackedConfig(object):
437 """The precompiled host checks and the CMC Check_MK helpers use a
438 "precompiled" part of the Check_MK configuration during runtime.
440 a) They must not use the live config from etc/check_mk during
441 startup. They are only allowed to load the config activated by
442 the user.
444 b) They must not load the whole Check_MK config, because they only
445 need the options needed for checking.
446 """
448 # These variables are part of the Check_MK configuration, but are not needed
449 # by the Check_MK keepalive mode, so exclude them from the packed config
450 _skipped_config_variable_names = [
451 "define_contactgroups",
452 "define_hostgroups",
453 "define_servicegroups",
454 "service_contactgroups",
455 "host_contactgroups",
456 "service_groups",
457 "host_groups",
458 "contacts",
459 "timeperiods",
460 "extra_service_conf",
461 "extra_nagios_conf",
464 def __init__(self):
465 super(PackedConfig, self).__init__()
466 self._path = os.path.join(cmk.utils.paths.var_dir, "base", "precompiled_check_config.mk")
468 def save(self):
469 self._write(self._pack())
471 def _pack(self):
472 helper_config = ("#!/usr/bin/env python\n"
473 "# encoding: utf-8\n"
474 "# Created by Check_MK. Dump of the currently active configuration\n\n")
476 config_cache = get_config_cache()
478 # The purpose of these functions is to filter out hosts which are monitored on different sites
479 active_hosts = config_cache.all_active_hosts()
480 active_clusters = config_cache.all_active_clusters()
482 def filter_all_hosts(all_hosts_orig):
483 all_hosts_red = []
484 for host_entry in all_hosts_orig:
485 hostname = host_entry.split("|", 1)[0]
486 if hostname in active_hosts:
487 all_hosts_red.append(host_entry)
488 return all_hosts_red
490 def filter_clusters(clusters_orig):
491 clusters_red = {}
492 for cluster_entry, cluster_nodes in clusters_orig.items():
493 clustername = cluster_entry.split("|", 1)[0]
494 if clustername in active_clusters:
495 clusters_red[cluster_entry] = cluster_nodes
496 return clusters_red
498 def filter_hostname_in_dict(values):
499 values_red = {}
500 for hostname, attributes in values.items():
501 if hostname in active_hosts:
502 values_red[hostname] = attributes
503 return values_red
505 filter_var_functions = {
506 "all_hosts": filter_all_hosts,
507 "clusters": filter_clusters,
508 "host_attributes": filter_hostname_in_dict,
509 "ipaddresses": filter_hostname_in_dict,
510 "ipv6addresses": filter_hostname_in_dict,
511 "explicit_snmp_communities": filter_hostname_in_dict,
512 "hosttags": filter_hostname_in_dict
516 # Add modified Check_MK base settings
519 variable_defaults = get_default_config()
520 derived_config_variable_names = get_derived_config_variable_names()
522 global_variables = globals()
524 for varname in get_variable_names() + list(derived_config_variable_names):
525 if varname in self._skipped_config_variable_names:
526 continue
528 val = global_variables[varname]
530 if varname not in derived_config_variable_names and val == variable_defaults[varname]:
531 continue
533 if not self._packable(varname, val):
534 continue
536 if varname in filter_var_functions:
537 val = filter_var_functions[varname](val)
539 helper_config += "\n%s = %r\n" % (varname, val)
542 # Add modified check specific Check_MK base settings
545 check_variable_defaults = get_check_variable_defaults()
547 for varname, val in get_check_variables().items():
548 if val == check_variable_defaults[varname]:
549 continue
551 if not self._packable(varname, val):
552 continue
554 helper_config += "\n%s = %r\n" % (varname, val)
556 return helper_config
558 def _packable(self, varname, val):
559 """Checks whether or not a variable can be written to the config.mk
560 and read again from it."""
561 if isinstance(val, six.string_types + (int, bool)) or not val:
562 return True
564 try:
565 eval(repr(val))
566 return True
567 except:
568 return False
570 def _write(self, helper_config):
571 store.makedirs(os.path.dirname(self._path))
573 store.save_file(self._path + ".orig", helper_config + "\n")
575 code = compile(helper_config, '<string>', 'exec')
576 with open(self._path + ".compiled", "w") as compiled_file:
577 marshal.dump(code, compiled_file)
579 os.rename(self._path + ".compiled", self._path)
581 def load(self):
582 _initialize_config()
583 exec (marshal.load(open(self._path)), globals())
584 _perform_post_config_loading_actions()
588 # .--Host tags-----------------------------------------------------------.
589 # | _ _ _ _ |
590 # | | | | | ___ ___| |_ | |_ __ _ __ _ ___ |
591 # | | |_| |/ _ \/ __| __| | __/ _` |/ _` / __| |
592 # | | _ | (_) \__ \ |_ | || (_| | (_| \__ \ |
593 # | |_| |_|\___/|___/\__| \__\__,_|\__, |___/ |
594 # | |___/ |
595 # +----------------------------------------------------------------------+
596 # | Helper functions for dealing with host tags |
597 # '----------------------------------------------------------------------'
600 def strip_tags(tagged_hostlist):
601 # type: (List[str]) -> List[str]
602 cache = cmk_base.config_cache.get_dict("strip_tags")
604 cache_id = tuple(tagged_hostlist)
605 try:
606 return cache[cache_id]
607 except KeyError:
608 result = [h.split('|', 1)[0] for h in tagged_hostlist]
609 cache[cache_id] = result
610 return result
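# Illustrative example: tags are appended to the host name with "|", so
#
#   strip_tags(["myhost|lan|prod", "otherhost|wan"]) -> ["myhost", "otherhost"]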
613 # This function should only be used during duplicate host check! It has to work like
614 # all_active_hosts() but with the difference that duplicates are not removed.
615 def _all_active_hosts_with_duplicates():
616 # type: () -> Set[str]
617 # Only available with CEE
618 if "shadow_hosts" in globals():
619 shadow_host_entries = shadow_hosts.keys()
620 else:
621 shadow_host_entries = []
623 config_cache = get_config_cache()
624 return _filter_active_hosts(config_cache, strip_tags(all_hosts) \
625 + strip_tags(clusters.keys()) \
626 + strip_tags(shadow_host_entries), keep_duplicates=True)
629 def _filter_active_hosts(config_cache, hostlist, keep_offline_hosts=False, keep_duplicates=False):
630 """Returns a set of active hosts for this site"""
631 if only_hosts is None and distributed_wato_site is None:
632 active_hosts = hostlist
634 elif only_hosts is None:
635 active_hosts = [
636 hostname for hostname in hostlist
637 if _host_is_member_of_site(config_cache, hostname, distributed_wato_site)
638 ]
640 elif distributed_wato_site is None:
641 if keep_offline_hosts:
642 active_hosts = hostlist
643 else:
644 active_hosts = [
645 hostname for hostname in hostlist
646 if config_cache.in_binary_hostlist(hostname, only_hosts)
647 ]
649 else:
650 active_hosts = [
651 hostname for hostname in hostlist
652 if (keep_offline_hosts or config_cache.in_binary_hostlist(hostname, only_hosts)) and
653 _host_is_member_of_site(config_cache, hostname, distributed_wato_site)
654 ]
656 if keep_duplicates:
657 return active_hosts
659 return set(active_hosts)
662 def duplicate_hosts():
663 # type: () -> List[str]
664 seen_hostnames = set() # type: Set[str]
665 duplicates = set() # type: Set[str]
667 for hostname in _all_active_hosts_with_duplicates():
668 if hostname in seen_hostnames:
669 duplicates.add(hostname)
670 else:
671 seen_hostnames.add(hostname)
673 return sorted(list(duplicates))
676 # Returns a list of all hosts which are associated with this site,
677 # but have been removed by the "only_hosts" rule. Normally these
678 # are the hosts which have the tag "offline".
680 # This is not optimized for performance, so use in specific situations.
681 def all_offline_hosts():
682 # type: () -> Set[str]
683 config_cache = get_config_cache()
685 hostlist = _filter_active_hosts(
686 config_cache,
687 config_cache.all_configured_realhosts().union(config_cache.all_configured_clusters()),
688 keep_offline_hosts=True)
690 return set([
691 hostname for hostname in hostlist
692 if not config_cache.in_binary_hostlist(hostname, only_hosts)
693 ])
696 def all_configured_offline_hosts():
697 # type: () -> Set[str]
698 config_cache = get_config_cache()
699 hostlist = config_cache.all_configured_realhosts().union(config_cache.all_configured_clusters())
701 return set([
702 hostname for hostname in hostlist
703 if not config_cache.in_binary_hostlist(hostname, only_hosts)
704 ])
708 # .--Hosts---------------------------------------------------------------.
709 # | _ _ _ |
710 # | | | | | ___ ___| |_ ___ |
711 # | | |_| |/ _ \/ __| __/ __| |
712 # | | _ | (_) \__ \ |_\__ \ |
713 # | |_| |_|\___/|___/\__|___/ |
714 # | |
715 # +----------------------------------------------------------------------+
716 # | Helper functions for dealing with hosts. |
717 # '----------------------------------------------------------------------'
720 def _host_is_member_of_site(config_cache, hostname, site):
721 # type: (ConfigCache, str, str) -> bool
722 for tag in config_cache.get_host_config(hostname).tags:
723 if tag.startswith("site:"):
724 return site == tag[5:]
725 # hosts without a site: tag belong to all sites
726 return True
729 def get_additional_ipaddresses_of(hostname):
730 # type: (str) -> Tuple[List[str], List[str]]
731 #TODO Regarding the following configuration variables from WATO
732 # there's no inheritance, thus we use 'host_attributes'.
733 # Better would be to use cmk_base configuration variables,
734 # eg. like 'management_protocol'.
735 return (host_attributes.get(hostname, {}).get("additional_ipv4addresses", []),
736 host_attributes.get(hostname, {}).get("additional_ipv6addresses", []))
740 # Management board
744 def management_address_of(hostname):
745 attributes_of_host = host_attributes.get(hostname, {})
746 if attributes_of_host.get("management_address"):
747 return attributes_of_host["management_address"]
749 return ipaddresses.get(hostname)
752 def management_credentials_of(hostname):
753 protocol = get_config_cache().get_host_config(hostname).management_protocol
754 if protocol == "snmp":
755 credentials_variable, default_value = management_snmp_credentials, snmp_default_community
756 elif protocol == "ipmi":
757 credentials_variable, default_value = management_ipmi_credentials, None
758 elif protocol is None:
759 return None
760 else:
761 raise NotImplementedError()
763 # First try to use the explicit configuration of the host
764 # (set directly for a host or via folder inheritance in WATO)
765 try:
766 return credentials_variable[hostname]
767 except KeyError:
768 pass
770 # If a rule matches, use the first rule for the management board protocol of the host
771 rule_settings = get_config_cache().host_extra_conf(hostname, management_board_config)
772 for rule_protocol, credentials in rule_settings:
773 if rule_protocol == protocol:
774 return credentials
776 return default_value
780 # Agent communication
784 def agent_port_of(hostname):
785 ports = get_config_cache().host_extra_conf(hostname, agent_ports)
786 if len(ports) == 0:
787 return agent_port
789 return ports[0]
792 def tcp_connect_timeout_of(hostname):
793 timeouts = get_config_cache().host_extra_conf(hostname, tcp_connect_timeouts)
794 if len(timeouts) == 0:
795 return tcp_connect_timeout
797 return timeouts[0]
800 def agent_encryption_of(hostname):
801 settings = get_config_cache().host_extra_conf(hostname, agent_encryption)
802 if settings:
803 return settings[0]
805 return {'use_regular': 'disable', 'use_realtime': 'enforce'}
808 def agent_target_version(hostname):
809 agent_target_versions = get_config_cache().host_extra_conf(hostname,
810 check_mk_agent_target_versions)
811 if agent_target_versions:
812 spec = agent_target_versions[0]
813 if spec == "ignore":
814 return None
815 elif spec == "site":
816 return cmk.__version__
817 elif isinstance(spec, str):
818 # Compatibility to old value specification format (a single version string)
819 return spec
820 elif spec[0] == 'specific':
821 return spec[1]
823 return spec # return the whole spec in case of an "at least version" config
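# Illustrative sketch of the accepted rule values (version numbers are
# hypothetical):
#
#   "ignore"                 -> None (no version check)
#   "site"                   -> the version of this site
#   "1.2.8p17"               -> that exact version string (old value format)
#   ("specific", "1.5.0p1")  -> "1.5.0p1"
#   ("at_least", {...})      -> the whole tuple is returned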
827 # Explicit custom variables
829 def get_explicit_service_custom_variables(hostname, description):
830 try:
831 return explicit_service_custom_variables[(hostname, description)]
832 except KeyError:
833 return {}
837 # Groups
841 def hostgroups_of(hostname):
842 return get_config_cache().host_extra_conf(hostname, host_groups)
845 def contactgroups_of(hostname):
846 cgrs = []
848 # host_contactgroups may take single values as well as
849 # lists as item value. Of all list entries only the first
850 # one is used. The single-contact-groups entries are all
851 # recognized.
852 first_list = True
853 for entry in get_config_cache().host_extra_conf(hostname, host_contactgroups):
854 if isinstance(entry, list) and first_list:
855 cgrs += entry
856 first_list = False
857 else:
858 cgrs.append(entry)
860 if monitoring_core == "nagios" and enable_rulebased_notifications:
861 cgrs.append("check-mk-notify")
863 return list(set(cgrs))
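# Illustrative example (group names are hypothetical): with matching
# host_contactgroups rule values ["admins", "linux"] (a list) and "oracle"
# (a single group), only the first list is merged and single entries are
# always taken, so the result would be ["admins", "linux", "oracle"] in
# some order, with duplicates removed.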
867 # Misc
871 def exit_code_spec(hostname, data_source_id=None):
872 spec = {}
873 specs = get_config_cache().host_extra_conf(hostname, check_mk_exit_status)
874 for entry in specs[::-1]:
875 spec.update(entry)
876 return _get_exit_code_spec(spec, data_source_id)
879 def _get_exit_code_spec(spec, data_source_id):
880 if data_source_id is not None:
881 try:
882 return spec["individual"][data_source_id]
883 except KeyError:
884 pass
886 try:
887 return spec["overall"]
888 except KeyError:
889 pass
891 # Old configuration format
892 return spec
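# Illustrative sketch (values are hypothetical): a check_mk_exit_status rule
# value in the current format looks like
#
#   {"overall": {"connection": 2, "missing_sections": 1},
#    "individual": {"piggyback": {"connection": 0}}}
#
# exit_code_spec(host, data_source_id="piggyback") then yields {"connection": 0},
# while other data sources fall back to the "overall" part. A flat dict (old
# format) is returned unchanged.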
895 def check_period_of(hostname, service):
896 periods = get_config_cache().service_extra_conf(hostname, service, check_periods)
897 if periods:
898 period = periods[0]
899 if period == "24X7":
900 return None
902 return period
904 return None
907 def check_interval_of(hostname, section_name):
908 if not cmk_base.check_utils.is_snmp_check(section_name):
909 return  # no values at all for non-SNMP checks
911 # Prior to 1.5 "match" could be a check name (including subchecks) instead of
912 # only main check names -> section names. This has been cleaned up, but we still
913 # need to be compatible. Strip off the sub check part of "match".
914 for match, minutes in get_config_cache().host_extra_conf(hostname, snmp_check_interval):
915 if match is None or match.split(".")[0] == section_name:
916 return minutes # use first match
920 # .--Cluster-------------------------------------------------------------.
921 # | ____ _ _ |
922 # | / ___| |_ _ ___| |_ ___ _ __ |
923 # | | | | | | | / __| __/ _ \ '__| |
924 # | | |___| | |_| \__ \ || __/ | |
925 # | \____|_|\__,_|___/\__\___|_| |
926 # | |
927 # +----------------------------------------------------------------------+
928 # | Code dealing with clusters (virtual hosts that are used to deal with |
929 # | services that can move between physical nodes. |
930 # '----------------------------------------------------------------------'
933 # Determine whether a service (found on a physical host) is a clustered
934 # service and - if yes - return the cluster host of the service. If
935 # not, return the hostname of the physical host.
936 # TODO: Clean this up!
937 def host_of_clustered_service(hostname, servicedesc, part_of_clusters=None):
938 return get_config_cache().host_of_clustered_service(
939 hostname, servicedesc, part_of_clusters=part_of_clusters)
943 # .--Services------------------------------------------------------------.
944 # | ____ _ |
945 # | / ___| ___ _ ____ _(_) ___ ___ ___ |
946 # | \___ \ / _ \ '__\ \ / / |/ __/ _ \/ __| |
947 # | ___) | __/ | \ V /| | (_| __/\__ \ |
948 # | |____/ \___|_| \_/ |_|\___\___||___/ |
949 # | |
950 # +----------------------------------------------------------------------+
951 # | Service related helper functions |
952 # '----------------------------------------------------------------------'
954 # Renaming of service descriptions while keeping backward compatibility with
955 # existing installations.
956 # Synchronize with htdocs/wato.py and plugins/wato/check_mk_configuration.py!
959 # Cleanup! .. some day
960 def _get_old_cmciii_temp_description(item):
961 if "Temperature" in item:
962 return False, item # old item format, no conversion
964 parts = item.split(" ")
965 if parts[0] == "Ambient":
966 return False, "%s Temperature" % parts[1]
968 elif len(parts) == 2:
969 return False, "%s %s.Temperature" % (parts[1], parts[0])
971 else:
972 if parts[1] == "LCP":
973 parts[1] = "Liquid_Cooling_Package"
974 return False, "%s %s.%s-Temperature" % (parts[1], parts[0], parts[2])
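# Illustrative examples of the conversion above (item names are hypothetical):
#
#   "Ambient CMCIII-PU"  -> (False, "CMCIII-PU Temperature")
#   "CMCIII-PU Sensor"   -> (False, "Sensor CMCIII-PU.Temperature")
#   "Zone LCP 1"         -> (False, "Liquid_Cooling_Package Zone.1-Temperature")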
977 _old_service_descriptions = {
978 "df": "fs_%s",
979 "df_netapp": "fs_%s",
980 "df_netapp32": "fs_%s",
981 "esx_vsphere_datastores": "fs_%s",
982 "hr_fs": "fs_%s",
983 "vms_diskstat.df": "fs_%s",
984 "zfsget": "fs_%s",
985 "ps": "proc_%s",
986 "ps.perf": "proc_%s",
987 "wmic_process": "proc_%s",
988 "services": "service_%s",
989 "logwatch": "LOG %s",
990 "logwatch.groups": "LOG %s",
991 "hyperv_vm": "hyperv_vms",
992 "ibm_svc_mdiskgrp": "MDiskGrp %s",
993 "ibm_svc_system": "IBM SVC Info",
994 "ibm_svc_systemstats.diskio": "IBM SVC Throughput %s Total",
995 "ibm_svc_systemstats.iops": "IBM SVC IOPS %s Total",
996 "ibm_svc_systemstats.disk_latency": "IBM SVC Latency %s Total",
997 "ibm_svc_systemstats.cache": "IBM SVC Cache Total",
998 "mknotifyd": "Notification Spooler %s",
999 "mknotifyd.connection": "Notification Connection %s",
1000 "casa_cpu_temp": "Temperature %s",
1001 "cmciii.temp": _get_old_cmciii_temp_description,
1002 "cmciii.psm_current": "%s",
1003 "cmciii_lcp_airin": "LCP Fanunit Air IN",
1004 "cmciii_lcp_airout": "LCP Fanunit Air OUT",
1005 "cmciii_lcp_water": "LCP Fanunit Water %s",
1006 "etherbox.temp": "Sensor %s",
1007 # While using the old description, don't append the item, even when discovered
1008 # with the new check which creates an item.
1009 "liebert_bat_temp": lambda item: (False, "Battery Temp"),
1010 "nvidia.temp": "Temperature NVIDIA %s",
1011 "ups_bat_temp": "Temperature Battery %s",
1012 "innovaphone_temp": lambda item: (False, "Temperature"),
1013 "enterasys_temp": lambda item: (False, "Temperature"),
1014 "raritan_emx": "Rack %s",
1015 "raritan_pdu_inlet": "Input Phase %s",
1016 "postfix_mailq": lambda item: (False, "Postfix Queue"),
1017 "nullmailer_mailq": lambda item: (False, "Nullmailer Queue"),
1018 "barracuda_mailqueues": lambda item: (False, "Mail Queue"),
1019 "qmail_stats": lambda item: (False, "Qmail Queue"),
1020 "mssql_backup": "%s Backup",
1021 "mssql_counters.cache_hits": "%s",
1022 "mssql_counters.transactions": "%s Transactions",
1023 "mssql_counters.locks": "%s Locks",
1024 "mssql_counters.sqlstats": "%s",
1025 "mssql_counters.pageactivity": "%s Page Activity",
1026 "mssql_counters.locks_per_batch": "%s Locks per Batch",
1027 "mssql_counters.file_sizes": "%s File Sizes",
1028 "mssql_databases": "%s Database",
1029 "mssql_datafiles": "Datafile %s",
1030 "mssql_tablespaces": "%s Sizes",
1031 "mssql_transactionlogs": "Transactionlog %s",
1032 "mssql_versions": "%s Version",
1033 "mssql_blocked_sessions": lambda item: (False, "MSSQL Blocked Sessions"),
1037 def service_description(hostname, check_plugin_name, item):
1038 if check_plugin_name not in check_info:
1039 if item:
1040 return "Unimplemented check %s / %s" % (check_plugin_name, item)
1041 return "Unimplemented check %s" % check_plugin_name
1043 # use user-supplied service description, if available
1044 add_item = True
1045 descr_format = service_descriptions.get(check_plugin_name)
1046 if not descr_format:
1047 # handle renaming for backward compatibility
1048 if check_plugin_name in _old_service_descriptions and \
1049 check_plugin_name not in use_new_descriptions_for:
1051 # Can be a function to generate the old description more flexibly.
1052 old_descr = _old_service_descriptions[check_plugin_name]
1053 if callable(old_descr):
1054 add_item, descr_format = old_descr(item)
1055 else:
1056 descr_format = old_descr
1058 else:
1059 descr_format = check_info[check_plugin_name]["service_description"]
1061 if isinstance(descr_format, str):
1062 descr_format = descr_format.decode("utf-8")
1064 # Note: we strip the service description (remove spaces).
1065 # One check defines "Pages %s" as a description, but the item
1066 # can be empty in some cases. Nagios silently drops leading
1067 # and trailing spaces in the configuration file.
1068 if add_item and isinstance(item, six.string_types + (numbers.Integral,)):
1069 if "%s" not in descr_format:
1070 descr_format += " %s"
1071 descr = descr_format % (item,)
1072 else:
1073 descr = descr_format
1075 if "%s" in descr:
1076 raise MKGeneralException("Found '%%s' in service description (Host: %s, Check type: %s, Item: %s). "
1077 "Please try to rediscover the service to fix this issue." % \
1078 (hostname, check_plugin_name, item))
1080 return get_final_service_description(hostname, descr)
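# Illustrative example (host and item are hypothetical): with the compatibility
# map above and "df" not listed in use_new_descriptions_for,
#
#   service_description("myhost", "df", u"/var")  ->  u"fs_/var"
#
# whereas a check without such an entry simply uses the service_description
# from its check_info declaration.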
1083 _old_active_check_service_descriptions = {
1084 "http": lambda params: (params[0][1:] if params[0].startswith("^") else "HTTP %s" % params[0])
1088 def active_check_service_description(hostname, active_check_name, params):
1089 if active_check_name not in active_check_info:
1090 return "Unimplemented check %s" % active_check_name
1092 if (active_check_name in _old_active_check_service_descriptions and
1093 active_check_name not in use_new_descriptions_for):
1094 description = _old_active_check_service_descriptions[active_check_name](params)
1095 else:
1096 act_info = active_check_info[active_check_name]
1097 description = act_info["service_description"](params)
1099 description = description.replace('$HOSTNAME$', hostname)
1101 return get_final_service_description(hostname, description)
1104 def get_final_service_description(hostname, description):
1105 translations = get_service_translations(hostname)
1106 if translations:
1107 # Translate
1108 description = cmk.utils.translations.translate_service_description(
1109 translations, description)
1111 # Sanitize; Remove illegal characters from a service description
1112 description = description.strip()
1113 cache = cmk_base.config_cache.get_dict("final_service_description")
1114 try:
1115 new_description = cache[description]
1116 except KeyError:
1117 new_description = "".join(
1118 [c for c in description if c not in nagios_illegal_chars]).rstrip("\\")
1119 cache[description] = new_description
1121 return new_description
1124 def service_ignored(hostname, check_plugin_name, description):
1125 if check_plugin_name and check_plugin_name in ignored_checktypes:
1126 return True
1128 if check_plugin_name and _checktype_ignored_for_host(hostname, check_plugin_name):
1129 return True
1131 if description is not None \
1132 and get_config_cache().in_boolean_serviceconf_list(hostname, description, ignored_services):
1133 return True
1135 return False
1138 def _checktype_ignored_for_host(host, checktype):
1139 if checktype in ignored_checktypes:
1140 return True
1141 ignored = get_config_cache().host_extra_conf(host, ignored_checks)
1142 for e in ignored:
1143 if checktype == e or (isinstance(e, list) and checktype in e):
1144 return True
1145 return False
1148 # TODO: Make this use the generic "rulesets" functions
1149 # a) This function has never been configurable via WATO (see https://mathias-kettner.de/checkmk_service_dependencies.html)
1150 # b) It only affects the Nagios core - CMC does not implement service dependencies
1151 # c) This function implements some specific regex match+replace handling which makes it incompatible with
1152 # regular service rulesets. Therefore service_extra_conf() cannot easily be used :-/
1153 def service_depends_on(hostname, servicedesc):
1154 """Return a list of services this services depends upon"""
1155 deps = []
1156 config_cache = get_config_cache()
1157 for entry in service_dependencies:
1158 entry, rule_options = get_rule_options(entry)
1159 if rule_options.get("disabled"):
1160 continue
1162 if len(entry) == 3:
1163 depname, hostlist, patternlist = entry
1164 tags = []
1165 elif len(entry) == 4:
1166 depname, tags, hostlist, patternlist = entry
1167 else:
1168 raise MKGeneralException("Invalid entry '%r' in service dependencies: "
1169 "must have 3 or 4 entries" % entry)
1171 if hosttags_match_taglist(config_cache.tag_list_of_host(hostname), tags) and \
1172 in_extraconf_hostlist(hostlist, hostname):
1173 for pattern in patternlist:
1174 matchobject = regex(pattern).search(servicedesc)
1175 if matchobject:
1176 try:
1177 item = matchobject.groups()[-1]
1178 deps.append(depname % item)
1179 except:
1180 deps.append(depname)
1181 return deps
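# Illustrative example (rule and names are hypothetical): with
#
#   service_dependencies = [
#       ("Interface %s", ["myhost"], ["Port (.*) Status"]),
#   ]
#
# service_depends_on("myhost", "Port eth0 Status") -> ["Interface eth0"]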
1185 # .--Misc Helpers--------------------------------------------------------.
1186 # | __ __ _ _ _ _ |
1187 # | | \/ (_)___ ___ | | | | ___| |_ __ ___ _ __ ___ |
1188 # | | |\/| | / __|/ __| | |_| |/ _ \ | '_ \ / _ \ '__/ __| |
1189 # | | | | | \__ \ (__ | _ | __/ | |_) | __/ | \__ \ |
1190 # | |_| |_|_|___/\___| |_| |_|\___|_| .__/ \___|_| |___/ |
1191 # | |_| |
1192 # +----------------------------------------------------------------------+
1193 # | Different helper functions |
1194 # '----------------------------------------------------------------------'
1197 def is_cmc():
1198 """Whether or not the site is currently configured to use the Microcore."""
1199 return monitoring_core == "cmc"
1202 def decode_incoming_string(s, encoding="utf-8"):
1203 try:
1204 return s.decode(encoding)
1205 except:
1206 return s.decode(fallback_agent_output_encoding)
1209 def translate_piggyback_host(sourcehost, backedhost):
1210 translation = _get_piggyback_translations(sourcehost)
1212 # To make it possible to match umlauts we need to change the hostname
1213 # to a unicode string which can then be matched with regexes etc.
1214 # We assume the incoming name is correctly encoded in UTF-8
1215 backedhost = decode_incoming_string(backedhost)
1217 translated = cmk.utils.translations.translate_hostname(translation, backedhost)
1219 return translated.encode('utf-8') # change back to UTF-8 encoded string
1222 def _get_piggyback_translations(hostname):
1223 """Get a dict that specifies the actions to be done during the hostname translation"""
1224 rules = get_config_cache().host_extra_conf(hostname, piggyback_translation)
1225 translations = {}
1226 for rule in rules[::-1]:
1227 translations.update(rule)
1228 return translations
1231 def get_service_translations(hostname):
1232 translations_cache = cmk_base.config_cache.get_dict("service_description_translations")
1233 if hostname in translations_cache:
1234 return translations_cache[hostname]
1236 rules = get_config_cache().host_extra_conf(hostname, service_description_translation)
1237 translations = {}
1238 for rule in rules[::-1]:
1239 for k, v in rule.items():
1240 if isinstance(v, list):
1241 translations.setdefault(k, set())
1242 translations[k] |= set(v)
1243 else:
1244 translations[k] = v
1246 translations_cache[hostname] = translations
1247 return translations
1250 def prepare_check_command(command_spec, hostname, description):
1251 """Prepares a check command for execution by Check_MK.
1253 This function either accepts a string or a list of arguments as
1254 command_spec. In case a list is given it quotes the single elements. It
1255 also prepares password store entries for the command line. These entries
1256 will be completed by the executed program later to get the password from
1257 the password store.
1258 """
1259 if isinstance(command_spec, six.string_types):
1260 return command_spec
1262 if not isinstance(command_spec, list):
1263 raise NotImplementedError()
1265 passwords, formated = [], []
1266 for arg in command_spec:
1267 arg_type = type(arg)
1269 if arg_type in [int, float]:
1270 formated.append("%s" % arg)
1272 elif arg_type in [str, unicode]:
1273 formated.append(cmk_base.utils.quote_shell_string(arg))
1275 elif arg_type == tuple and len(arg) == 3:
1276 pw_ident, preformated_arg = arg[1:]
1277 try:
1278 password = stored_passwords[pw_ident]["password"]
1279 except KeyError:
1280 if hostname and description:
1281 descr = " used by service \"%s\" on host \"%s\"" % (description, hostname)
1282 elif hostname:
1283 descr = " used by host host \"%s\"" % (hostname)
1284 else:
1285 descr = ""
1287 console.warning(
1288 "The stored password \"%s\"%s does not exist (anymore)." % (pw_ident, descr))
1289 password = "%%%"
1291 pw_start_index = str(preformated_arg.index("%s"))
1292 formated.append(
1293 cmk_base.utils.quote_shell_string(preformated_arg % ("*" * len(password))))
1294 passwords.append((str(len(formated)), pw_start_index, pw_ident))
1296 else:
1297 raise MKGeneralException("Invalid argument for command line: %r" % (arg,))
1299 if passwords:
1300 formated = ["--pwstore=%s" % ",".join(["@".join(p) for p in passwords])] + formated
1302 return " ".join(formated)
1305 def get_http_proxy(http_proxy):
1306 # type: (Tuple) -> Optional[str]
1307 """Returns proxy URL to be used for HTTP requests
1309 Pass a value configured by the user using the HTTPProxyReference valuespec to this function
1310 and you will get back either a proxy URL, an empty string to enforce no proxy usage or None
1311 to use the proxy configuration from the process environment.
1312 """
1313 if not isinstance(http_proxy, tuple):
1314 return None
1316 proxy_type, value = http_proxy
1318 if proxy_type == "environment":
1319 return None
1321 if proxy_type == "global":
1322 return http_proxies.get(value, {}).get("proxy_url", None)
1324 if proxy_type == "url":
1325 return value
1327 if proxy_type == "no_proxy":
1328 return ""
1330 return None
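# Illustrative examples ("my_proxy" is a hypothetical global proxy id):
#
#   get_http_proxy(("environment", "environment"))  -> None  (use process env)
#   get_http_proxy(("no_proxy", None))              -> ""    (enforce no proxy)
#   get_http_proxy(("url", "http://proxy:3128"))    -> "http://proxy:3128"
#   get_http_proxy(("global", "my_proxy"))          -> URL from http_proxies, or None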
1334 # .--Host matching-------------------------------------------------------.
1335 # | _ _ _ _ _ _ |
1336 # | | | | | ___ ___| |_ _ __ ___ __ _| |_ ___| |__ (_)_ __ __ _ |
1337 # | | |_| |/ _ \/ __| __| | '_ ` _ \ / _` | __/ __| '_ \| | '_ \ / _` | |
1338 # | | _ | (_) \__ \ |_ | | | | | | (_| | || (__| | | | | | | | (_| | |
1339 # | |_| |_|\___/|___/\__| |_| |_| |_|\__,_|\__\___|_| |_|_|_| |_|\__, | |
1340 # | |___/ |
1341 # +----------------------------------------------------------------------+
1342 # | Code for calculating the host condition matching of rules |
1343 # '----------------------------------------------------------------------'
1346 def all_matching_hosts(tags, hostlist, with_foreign_hosts):
1347 return get_config_cache().all_matching_hosts(tags, hostlist, with_foreign_hosts)
1350 def in_extraconf_hostlist(hostlist, hostname):
1351 """Whether or not the given host matches the hostlist.
1353 Entries in list are hostnames that must equal the hostname.
1354 Expressions beginning with ! are negated: if they match,
1355 the item is excluded from the list.
1357 Expressions beginning with ~ are treated as regular expression.
1358 Also the three special tags '@all', '@clusters', '@physical'
1359 are allowed.
1360 """
1362 # Migration help: print error if old format appears in config file
1363 # FIXME: When can this be removed?
1364 try:
1365 if hostlist[0] == "":
1366 raise MKGeneralException('Invalid empty entry [ "" ] in configuration')
1367 except IndexError:
1368 pass # Empty list, no problem.
1370 for hostentry in hostlist:
1371 if hostentry == '':
1372 raise MKGeneralException('Empty hostname in host list %r' % hostlist)
1373 negate = False
1374 use_regex = False
1375 if hostentry[0] == '@':
1376 if hostentry == '@all':
1377 return True
1378 # TODO: Is not used anymore for a long time. Will be cleaned up
1379 # with 1.6 tuple ruleset cleanup
1380 #ic = is_cluster(hostname)
1381 #if hostentry == '@cluster' and ic:
1382 # return True
1383 #elif hostentry == '@physical' and not ic:
1384 # return True
1386 # Allow negation of hostentry with prefix '!'
1387 else:
1388 if hostentry[0] == '!':
1389 hostentry = hostentry[1:]
1390 negate = True
1392 # Allow regex with prefix '~'
1393 if hostentry[0] == '~':
1394 hostentry = hostentry[1:]
1395 use_regex = True
1397 try:
1398 if not use_regex and hostname == hostentry:
1399 return not negate
1400 # Handle Regex. Note: hostname == True -> generic unknown host
1401 elif use_regex and hostname != True:
1402 if regex(hostentry).match(hostname) is not None:
1403 return not negate
1404 except MKGeneralException:
1405 if cmk.utils.debug.enabled():
1406 raise
1408 return False
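# Illustrative examples (host names are hypothetical):
#
#   in_extraconf_hostlist(["@all"], "anyhost")                 -> True
#   in_extraconf_hostlist(["!badhost", "~web\d+"], "web12")    -> True
#   in_extraconf_hostlist(["!badhost", "~web\d+"], "badhost")  -> False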
1411 def parse_host_rule(rule):
1412 rule, rule_options = get_rule_options(rule)
1414 num_elements = len(rule)
1415 if num_elements == 2:
1416 item, hostlist = rule
1417 tags = []
1418 elif num_elements == 3:
1419 item, tags, hostlist = rule
1420 else:
1421 raise MKGeneralException("Invalid entry '%r' in host configuration list: must "
1422 "have 2 or 3 entries" % (rule,))
1424 return item, tags, hostlist, rule_options
1427 def get_rule_options(entry):
1428 """Get the options from a rule.
1430 Pick out the option element of a rule. Currently the options "disabled"
1431 and "comments" are being honored."""
1432 if isinstance(entry[-1], dict):
1433 return entry[:-1], entry[-1]
1435 return entry, {}
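# Illustrative example: the options dict is simply split off the end of the rule
#
#   get_rule_options(("CPU load", ["myhost"], {"disabled": True}))
#       -> (("CPU load", ["myhost"]), {"disabled": True})
#   get_rule_options(("CPU load", ["myhost"]))
#       -> (("CPU load", ["myhost"]), {})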
1438 def hosttags_match_taglist(hosttags, required_tags):
1439 """Check if a host fulfills the requirements of a tag list.
1441 The host must have all tags in the list, except
1442 for those negated with '!'. Those the host must *not* have!
1443 A trailing + means a prefix match."""
1444 for tag in required_tags:
1445 negate, tag = _parse_negated(tag)
1446 if tag and tag[-1] == '+':
1447 tag = tag[:-1]
1448 matches = False
1449 for t in hosttags:
1450 if t.startswith(tag):
1451 matches = True
1452 break
1454 else:
1455 matches = tag in hosttags
1457 if matches == negate:
1458 return False
1460 return True
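# Illustrative examples (tag names are hypothetical):
#
#   hosttags_match_taglist(["lan", "prod", "site:muc"], ["lan", "!test"])  -> True
#   hosttags_match_taglist(["lan", "prod", "site:muc"], ["site:+"])        -> True (prefix match)
#   hosttags_match_taglist(["lan", "test"], ["lan", "!test"])              -> False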
1463 def _parse_negated(pattern):
1464 # Allow negation of pattern with prefix '!'
1465 try:
1466 negate = pattern[0] == '!'
1467 if negate:
1468 pattern = pattern[1:]
1469 except IndexError:
1470 negate = False
1472 return negate, pattern
1475 # Converts a regex pattern which is used to e.g. match services within Check_MK
1476 # into a reference to a matching function which takes one parameter to
1477 # perform the matching and returns a two item tuple where the first element
1478 # tells whether or not the pattern is negated and the second element is the outcome
1479 # of the match.
1480 # This function tries to parse the pattern and return different kinds of matching
1481 # functions which can then be evaluated faster than just using the regex match.
1482 def _convert_pattern(pattern):
1483 def is_infix_string_search(pattern):
1484 return pattern.startswith('.*') and not is_regex(pattern[2:])
1486 def is_exact_match(pattern):
1487 return pattern[-1] == '$' and not is_regex(pattern[:-1])
1489 def is_prefix_match(pattern):
1490 return pattern[-2:] == '.*' and not is_regex(pattern[:-2])
1492 if pattern == '':
1493 return False, lambda txt: True # empty patterns match always
1495 negate, pattern = _parse_negated(pattern)
1497 if is_exact_match(pattern):
1498 # Exact string match
1499 return negate, lambda txt: pattern[:-1] == txt
1501 elif is_infix_string_search(pattern):
1502 # Using regex to search a substring within text
1503 return negate, lambda txt: pattern[2:] in txt
1505 elif is_prefix_match(pattern):
1506 # prefix match with tailing .*
1507 pattern = pattern[:-2]
1508 return negate, lambda txt: txt[:len(pattern)] == pattern
1510 elif is_regex(pattern):
1511 # Non specific regex. Use real prefix regex matching
1512 return negate, lambda txt: regex(pattern).match(txt) is not None
1514 # prefix match without any regex chars
1515 return negate, lambda txt: txt[:len(pattern)] == pattern
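# Illustrative examples of the resulting matchers:
#
#   _convert_pattern("Memory$")        -> (False, <exact match against "Memory">)
#   _convert_pattern(".*SQL")          -> (False, <substring search for "SQL">)
#   _convert_pattern("!Interface .*")  -> (True,  <prefix match for "Interface ">)
#   _convert_pattern("fs_/dev|/tmp")   -> (False, <real regex match>)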
1518 def _convert_pattern_list(patterns):
1519 return tuple([_convert_pattern(p) for p in patterns])
1522 # Slow variant of checking whether a service is matched by a list
1523 # of regexes - used e.g. by cmk --notify
1524 def in_extraconf_servicelist(servicelist, service):
1525 return _in_servicematcher_list(_convert_pattern_list(servicelist), service)
1528 def _in_servicematcher_list(service_matchers, item):
1529 for negate, func in service_matchers:
1530 result = func(item)
1531 if result:
1532 return not negate
1534 # no match in list -> negative answer
1535 return False
1539 # .--Constants-----------------------------------------------------------.
1540 # | ____ _ _ |
1541 # | / ___|___ _ __ ___| |_ __ _ _ __ | |_ ___ |
1542 # | | | / _ \| '_ \/ __| __/ _` | '_ \| __/ __| |
1543 # | | |__| (_) | | | \__ \ || (_| | | | | |_\__ \ |
1544 # | \____\___/|_| |_|___/\__\__,_|_| |_|\__|___/ |
1545 # | |
1546 # +----------------------------------------------------------------------+
1547 # | Some constants to be used in the configuration and at other places |
1548 # '----------------------------------------------------------------------'
1550 # Convenience macros for legacy tuple based host and service rules
1551 # TODO: Deprecate these in a gentle way
1552 PHYSICAL_HOSTS = cmk.utils.rulesets.tuple_rulesets.PHYSICAL_HOSTS
1553 CLUSTER_HOSTS = cmk.utils.rulesets.tuple_rulesets.CLUSTER_HOSTS
1554 ALL_HOSTS = cmk.utils.rulesets.tuple_rulesets.ALL_HOSTS
1555 ALL_SERVICES = cmk.utils.rulesets.tuple_rulesets.ALL_SERVICES
1556 NEGATE = cmk.utils.rulesets.tuple_rulesets.NEGATE
1558 # TODO: Cleanup access to check_info[] -> replace it by different function calls
1559 # like for example check_exists(...)
1561 # BE AWARE: sync these global data structures with
1562 # _initialize_data_structures()
1563 # TODO: Refactor this.
1565 # The checks are loaded into this dictionary. Each check has a separate
1566 # sub-dictionary, named by the check name. It is populated with the
1567 # includes and the check itself.
1568 _check_contexts = {} # type: Dict[str, Any]
1570 # The following data structures will be filled by the checks
1571 # all known checks
1572 check_info = {} # type: Dict[str, Union[Tuple[Any], Dict[str, Any]]]
1573 # library files needed by checks
1574 check_includes = {} # type: Dict[str, List[Any]]
1575 # optional functions for parameter precompilation
1576 precompile_params = {} # type: Dict[str, Callable[[str, str, Dict[str, Any]], Any]]
1577 # dictionary-configured checks declare their default level variables here
1578 check_default_levels = {} # type: Dict[str, Any]
1579 # factory settings for dictionary-configured checks
1580 factory_settings = {} # type: Dict[str, Dict[str, Any]]
1581 # variables (names) in checks/* needed for check itself
1582 check_config_variables = [] # type: List[Any]
1583 # which OIDs to fetch for which check (for tabular information)
1584 snmp_info = {} # type: Dict[str, Union[Tuple[Any], List[Tuple[Any]]]]
1585 # SNMP autodetection
1586 snmp_scan_functions = {} # type: Dict[str, Callable[[Callable[[str], str]], bool]]
1587 # definitions of active "legacy" checks
1588 active_check_info = {} # type: Dict[str, Dict[str, Any]]
1589 special_agent_info = {
1590 } # type: Dict[str, Callable[[Dict[str, Any], str, str], Union[str, List[str]]]]
1592 # Names of variables registered in the check files. This is used to
1593 # keep track of the variables needed by each file. Those variables are then
1594 # (if available) read from the config and applied to the checks module after
1595 # reading in the configuration of the user.
1596 _check_variables = {} # type: Dict[str, List[Any]]
1597 # keeps the default values of all the check variables
1598 _check_variable_defaults = {} # type: Dict[str, Any]
1599 _all_checks_loaded = False
1601 # workaround: set of check-groups that are to be treated as service-checks even if
1602 # the item is None
1603 service_rule_groups = set(["temperature"])
1606 # .--Loading-------------------------------------------------------------.
1607 # | _ _ _ |
1608 # | | | ___ __ _ __| (_)_ __ __ _ |
1609 # | | | / _ \ / _` |/ _` | | '_ \ / _` | |
1610 # | | |__| (_) | (_| | (_| | | | | | (_| | |
1611 # | |_____\___/ \__,_|\__,_|_|_| |_|\__, | |
1612 # | |___/ |
1613 # +----------------------------------------------------------------------+
1614 # | Loading of check plugins |
1615 # '----------------------------------------------------------------------'
1618 def load_all_checks(get_check_api_context):
1619 """Load all checks and includes"""
1620 global _all_checks_loaded
1622 _initialize_data_structures()
1623 filelist = get_plugin_paths(cmk.utils.paths.local_checks_dir, cmk.utils.paths.checks_dir)
1624 load_checks(get_check_api_context, filelist)
1626 _all_checks_loaded = True
1629 def _initialize_data_structures():
1630 """Initialize some data structures which are populated while loading the checks"""
1631 global _all_checks_loaded
1632 _all_checks_loaded = False
1634 _check_variables.clear()
1635 _check_variable_defaults.clear()
1637 _check_contexts.clear()
1638 check_info.clear()
1639 check_includes.clear()
1640 precompile_params.clear()
1641 check_default_levels.clear()
1642 factory_settings.clear()
1643 del check_config_variables[:]
1644 snmp_info.clear()
1645 snmp_scan_functions.clear()
1646 active_check_info.clear()
1647 special_agent_info.clear()
1650 def get_plugin_paths(*dirs):
1651 filelist = []
1652 for directory in dirs:
1653 filelist += _plugin_pathnames_in_directory(directory)
1654 return filelist
1657 # Now read in all checks. Note: this is done *before* reading the
1658 # configuration, because checks define variables with default
1659 # values. The user can override those variables in his configuration.
1660 # If a check or check.include is found both in local/ and in the
1661 # normal structure, then only the file in local/ must be read!
1662 def load_checks(get_check_api_context, filelist):
1663 cmk_global_vars = set(get_variable_names())
1665 loaded_files = set()
1667 for f in filelist:
1668 if f[0] == "." or f[-1] == "~":
1669 continue # ignore editor backup / temp files
1671 file_name = os.path.basename(f)
1672 if file_name in loaded_files:
1673 continue # skip already loaded files (e.g. from local)
1675 try:
1676 check_context = new_check_context(get_check_api_context)
1678 known_vars = check_context.keys()
1679 known_checks = check_info.keys()
1680 known_active_checks = active_check_info.keys()
1682 load_check_includes(f, check_context)
1684 load_precompiled_plugin(f, check_context)
1685 loaded_files.add(file_name)
1687 except MKTerminate:
1688 raise
1690 except Exception as e:
1691 console.error("Error in plugin file %s: %s\n", f, e)
1692 if cmk.utils.debug.enabled():
1693 raise
1694 else:
1695 continue
1697 new_checks = set(check_info.keys()).difference(known_checks)
1698 new_active_checks = set(active_check_info.keys()).difference(known_active_checks)
1700 # Now store the check context for all checks found in this file
1701 for check_plugin_name in new_checks:
1702 _check_contexts[check_plugin_name] = check_context
1704 for check_plugin_name in new_active_checks:
1705 _check_contexts[check_plugin_name] = check_context
1707 # Collect all variables that the check file did introduce compared to the
1708 # default check context
1709 new_check_vars = {}
1710 for varname in set(check_context.keys()).difference(known_vars):
1711 new_check_vars[varname] = check_context[varname]
1713 # The default_levels_variable of check_info also declares use of a global
1714 # variable. Register it here for this context.
1715 for check_plugin_name in new_checks:
1716 # The check_info is not converted yet (convert_check_info()). This means we need
1717 # to deal with old style tuple configured checks
1718 if isinstance(check_info[check_plugin_name], tuple):
1719 default_levels_varname = check_default_levels.get(check_plugin_name)
1720 else:
1721 default_levels_varname = check_info[check_plugin_name].get(
1722 "default_levels_variable")
1724 if default_levels_varname:
1725 # Add the initial configuration to the check context to have a consistent state
1726 check_context[default_levels_varname] = factory_settings.get(
1727 default_levels_varname, {})
1728 new_check_vars[default_levels_varname] = check_context[default_levels_varname]
1730 # Save the check variables so that, e.g. after config loading, the configured
1731 # values can be added to the check contexts.
1732 for varname, value in new_check_vars.items():
1733 # Do not allow checks to override Check_MK builtin global variables. Silently
1734 # skip them here. The variables will only be locally available to the checks.
1735 if varname in cmk_global_vars:
1736 continue
1738 if varname.startswith("_"):
1739 continue
1741 if inspect.isfunction(value) or inspect.ismodule(value):
1742 continue
1744 _check_variable_defaults[varname] = value
1746 # Keep track of which variable needs to be set to which context
1747 context_ident_list = _check_variables.setdefault(varname, [])
1748 context_ident_list += new_checks
1749 context_ident_list += new_active_checks
1751 # Now convert check_info to new format.
1752 convert_check_info()
1753 verify_checkgroup_members()
1754 initialize_check_type_caches()
1757 def all_checks_loaded():
1758 """Whether or not all(!) checks have been loaded into the current process"""
1759 return _all_checks_loaded
1762 def any_check_loaded():
1763 """Whether or not some checks have been loaded into the current process"""
1764 return bool(_check_contexts)
1767 # Constructs a new check context dictionary. It contains the whole check API.
1768 def new_check_context(get_check_api_context):
1769 # Add the data structures where the checks register with Check_MK
1770 context = {
1771 "check_info": check_info,
1772 "check_includes": check_includes,
1773 "precompile_params": precompile_params,
1774 "check_default_levels": check_default_levels,
1775 "factory_settings": factory_settings,
1776 "check_config_variables": check_config_variables,
1777 "snmp_info": snmp_info,
1778 "snmp_scan_functions": snmp_scan_functions,
1779 "active_check_info": active_check_info,
1780 "special_agent_info": special_agent_info,
1781 }
1782 # NOTE: For better separation it would be better to copy the values, but
1783 # this might consume too much memory, so we simply reference them.
1784 context.update(get_check_api_context())
1785 return context
1788 # Load the definitions of the required include files for this check
1789 # Working with imports when specifying the includes would be much cleaner,
1790 # sure. But we need to deal with the current check API.
1791 def load_check_includes(check_file_path, check_context):
1792 for include_file_name in cached_includes_of_plugin(check_file_path):
1793 include_file_path = check_include_file_path(include_file_name)
1794 try:
1795 load_precompiled_plugin(include_file_path, check_context)
1796 except MKTerminate:
1797 raise
1799 except Exception as e:
1800 console.error("Error in check include file %s: %s\n", include_file_path, e)
1801 if cmk.utils.debug.enabled():
1802 raise
1803 else:
1804 continue
1807 def check_include_file_path(include_file_name):
1808 local_path = os.path.join(cmk.utils.paths.local_checks_dir, include_file_name)
1809 if os.path.exists(local_path):
1810 return local_path
1811 return os.path.join(cmk.utils.paths.checks_dir, include_file_name)
1814 def cached_includes_of_plugin(check_file_path):
1815 cache_file_path = _include_cache_file_path(check_file_path)
1816 try:
1817 return _get_cached_check_includes(check_file_path, cache_file_path)
1818 except OSError:
1819 pass # No usable cache. Fall through and recompute below.
1821 includes = includes_of_plugin(check_file_path)
1822 _write_check_include_cache(cache_file_path, includes)
1823 return includes
1826 def _get_cached_check_includes(check_file_path, cache_file_path):
1827 check_stat = os.stat(check_file_path)
1828 cache_stat = os.stat(cache_file_path)
1830 if check_stat.st_mtime >= cache_stat.st_mtime:
1831 raise OSError("Cache is too old")
1833 # There are no includes (just the newline at the end)
1834 if cache_stat.st_size == 1:
1835 return [] # No includes
1837 # store.save_file() creates the file empty for locking (in case it does not exist).
1838 # Skip loading the file.
1839 # Note: When raising here this process will also write the file. This means it
1840 # will write it another time after it was written by the other process. This
1841 # could be optimized. Since the whole caching here is a temporary(tm) solution,
1842 # we leave it as it is.
1843 if cache_stat.st_size == 0:
1844 raise OSError("Cache generation in progress (file is locked)")
1846 x = open(cache_file_path).read().strip()
1847 if not x:
1848 return [] # Shouldn't happen. Empty files are handled above
1849 return x.split("|")
1852 def _write_check_include_cache(cache_file_path, includes):
1853 store.makedirs(os.path.dirname(cache_file_path))
1854 store.save_file(cache_file_path, "%s\n" % "|".join(includes))
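# A minimal sketch of the cache file layout assumed here: the include file
# names are joined with "|" and terminated by a newline, so a plugin with two
# (hypothetical) includes would be cached as the single line
#
#     foo.include|bar.include
#
# and an empty include list is stored as just the newline, which is why
# _get_cached_check_includes() treats a file of size 1 as "no includes".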
1857 def _include_cache_file_path(path):
1858 is_local = path.startswith(cmk.utils.paths.local_checks_dir)
1859 return os.path.join(cmk.utils.paths.include_cache_dir, "local" if is_local else "builtin",
1860 os.path.basename(path))
1863 # Parse the check file without executing the code to find the check include
1864 # files the check uses. The following statements are extracted:
1865 # check_info[...] = { "includes": [...] }
1866 # inv_info[...] = { "includes": [...] }
1867 # check_includes[...] = [...]
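#
# As an illustration (plugin and include names below are made up), a check
# file containing
#
#     check_info["foo"] = {"service_description": "FOO %s",
#                          "includes": ["foo.include"]}
#     check_includes["foo"] = ["bar.include"]
#
# would make includes_of_plugin() return ["foo.include", "bar.include"].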
1868 def includes_of_plugin(check_file_path):
1869 include_names = OrderedDict()
1871 def _load_from_check_info(node):
1872 if not isinstance(node.value, ast.Dict):
1873 return
1875 for key, val in zip(node.value.keys, node.value.values):
1876 if key.s == "includes":
1877 if isinstance(val, ast.List):
1878 for element in val.elts:
1879 include_names[element.s] = True
1880 else:
1881 raise MKGeneralException("Includes must be a list of include file names, "
1882 "found '%s'" % type(val))
1884 def _load_from_check_includes(node):
1885 if isinstance(node.value, ast.List):
1886 for element in node.value.elts:
1887 include_names[element.s] = True
1889 tree = ast.parse(open(check_file_path).read())
1890 for child in ast.iter_child_nodes(tree):
1891 if not isinstance(child, ast.Assign):
1892 continue # We only care about top level assigns
1894 # Filter out assignments to check_info dictionary
1895 for target in child.targets:
1896 if isinstance(target, ast.Subscript) and isinstance(target.value, ast.Name):
1897 if target.value.id in ["check_info", "inv_info"]:
1898 _load_from_check_info(child)
1899 elif target.value.id == "check_includes":
1900 _load_from_check_includes(child)
1902 return include_names.keys()
1905 def _plugin_pathnames_in_directory(path):
1906 if path and os.path.exists(path):
1907 return sorted([
1908 path + "/" + f
1909 for f in os.listdir(path)
1910 if not f.startswith(".") and not f.endswith(".include")
1911 ])
1912 return []
1915 def load_precompiled_plugin(path, check_context):
1916 """Loads the given check or check include plugin into the given
1917 check context.
1919 To improve loading speed the files are not read directly. They are
1920 compiled to Python byte-code first, in case this has not been done
1921 before. If a compiled file already exists that is newer than the source
1922 file, the precompiled file is loaded."""
1924 precompiled_path = _precompiled_plugin_path(path)
1926 if not _is_plugin_precompiled(path, precompiled_path):
1927 console.vverbose("Precompile %s to %s\n" % (path, precompiled_path))
1928 store.makedirs(os.path.dirname(precompiled_path))
1929 py_compile.compile(path, precompiled_path, doraise=True)
1931 exec (marshal.loads(open(precompiled_path, "rb").read()[8:]), check_context)
1934 def _is_plugin_precompiled(path, precompiled_path):
1935 if not os.path.exists(precompiled_path):
1936 return False
1938 # Check precompiled file header
1939 f = open(precompiled_path, "rb")
1941 file_magic = f.read(4)
1942 if file_magic != py_compile.MAGIC:
1943 return False
1945 try:
1946 origin_file_mtime = struct.unpack("I", f.read(4))[0]
1947 except struct.error:
1948 return False
1950 if long(os.stat(path).st_mtime) != origin_file_mtime:
1951 return False
1953 return True
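# Note on the header check above: this module targets Python 2, where a .pyc
# file starts with an 8 byte header (4 bytes magic number followed by 4 bytes
# source mtime) and then the marshalled code object. That is why
# load_precompiled_plugin() skips the first 8 bytes before marshal.loads() and
# why the mtime is read from the 4 bytes at offset 4.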
1956 def _precompiled_plugin_path(path):
1957 is_local = path.startswith(cmk.utils.paths.local_checks_dir)
1958 return os.path.join(cmk.utils.paths.precompiled_checks_dir, "local" if is_local else "builtin",
1959 os.path.basename(path))
1962 def check_variable_names():
1963 return _check_variables.keys()
1966 def get_check_variable_defaults():
1967 """Returns the check variable default settings. These are the settings right
1968 after loading the checks."""
1969 return _check_variable_defaults
1972 def set_check_variables(check_variables):
1973 """Update the check related config variables in the relevant check contexts"""
1974 for varname, value in check_variables.items():
1975 for context_ident in _check_variables[varname]:
1976 _check_contexts[context_ident][varname] = value
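# Rough usage sketch (the variable name is made up): after the user
# configuration has been read, something like
#
#     set_check_variables({"example_default_levels": {"levels": (80.0, 90.0)}})
#
# writes the configured value into _check_contexts[<plugin>] for every plugin
# listed in _check_variables["example_default_levels"].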
1979 def get_check_variables():
1980 """Returns the currently effective check variable settings
1982 Since the variables are only stored in the individual check contexts and not stored
1983 in a central place, this function needs to collect the values from the check contexts.
1984 We assume a single variable has the same value in all relevant contexts, which means
1985 that it is enough to get the variable from the first context."""
1986 check_config = {}
1987 for varname, context_ident_list in _check_variables.iteritems():
1988 check_config[varname] = _check_contexts[context_ident_list[0]][varname]
1989 return check_config
1992 def get_check_context(check_plugin_name):
1993 """Returns the context dictionary of the given check plugin"""
1994 return _check_contexts[check_plugin_name]
1997 # FIXME: Clear / unset all legacy variables to prevent confusions in other code trying to
1998 # use the legacy variables which are not set by newer checks.
1999 def convert_check_info():
2000 check_info_defaults = {
2001 "check_function": None,
2002 "inventory_function": None,
2003 "parse_function": None,
2004 "group": None,
2005 "snmp_info": None,
2006 "snmp_scan_function": None,
2007 "handle_empty_info": False,
2008 "handle_real_time_checks": False,
2009 "default_levels_variable": None,
2010 "node_info": False,
2011 "extra_sections": [],
2012 "service_description": None,
2013 "has_perfdata": False,
2014 "management_board": None,
2015 }
2017 for check_plugin_name, info in check_info.items():
2018 section_name = cmk_base.check_utils.section_name_of(check_plugin_name)
2020 if not isinstance(info, dict):
2021 # Convert check declaration from old style to new API
2022 check_function, descr, has_perfdata, inventory_function = info
2024 scan_function = snmp_scan_functions.get(check_plugin_name,
2025 snmp_scan_functions.get(section_name))
2027 check_info[check_plugin_name] = {
2028 "check_function": check_function,
2029 "service_description": descr,
2030 "has_perfdata": bool(has_perfdata),
2031 "inventory_function": inventory_function,
2032 # Insert check name as group if no group is being defined
2033 "group": check_plugin_name,
2034 "snmp_info": snmp_info.get(check_plugin_name),
2035 # Sometimes the scan function is assigned to the check_plugin_name
2036 # rather than to the base name.
2037 "snmp_scan_function": scan_function,
2038 "handle_empty_info": False,
2039 "handle_real_time_checks": False,
2040 "default_levels_variable": check_default_levels.get(check_plugin_name),
2041 "node_info": False,
2042 "parse_function": None,
2043 "extra_sections": [],
2044 "management_board": None,
2045 }
2046 else:
2047 # Ensure that there are only the known keys set. Is meant to detect typos etc.
2048 for key in info.keys():
2049 if key != "includes" and key not in check_info_defaults:
2050 raise MKGeneralException(
2051 "The check '%s' declares an unexpected key '%s' in 'check_info'." %
2052 (check_plugin_name, key))
2054 # The check already uses the new API. Make sure that all keys are present;
2055 # extra check-specific information goes into file-specific variables.
2056 for key, val in check_info_defaults.items():
2057 info.setdefault(key, val)
2059 # Include files are related to the check file (= the section_name),
2060 # not to the (sub-)check. So we keep them in check_includes.
2061 check_includes.setdefault(section_name, [])
2062 check_includes[section_name] += info.get("includes", [])
2064 # Make sure that setting for node_info of check and subcheck matches
2065 for check_plugin_name, info in check_info.iteritems():
2066 if "." in check_plugin_name:
2067 section_name = cmk_base.check_utils.section_name_of(check_plugin_name)
2068 if section_name not in check_info:
2069 if info["node_info"]:
2070 raise MKGeneralException(
2071 "Invalid check implementation: node_info for %s is "
2072 "True, but base check %s not defined" % (check_plugin_name, section_name))
2074 elif check_info[section_name]["node_info"] != info["node_info"]:
2075 raise MKGeneralException(
2076 "Invalid check implementation: node_info for %s "
2077 "and %s are different." % ((section_name, check_plugin_name)))
2079 # Now gather snmp_info and snmp_scan_function back to the
2080 # original arrays. Note: this information is tied to an "agent section",
2081 # not to a check. Several checks may use the same SNMP info and scan function.
2082 for check_plugin_name, info in check_info.iteritems():
2083 section_name = cmk_base.check_utils.section_name_of(check_plugin_name)
2084 if info["snmp_info"] and section_name not in snmp_info:
2085 snmp_info[section_name] = info["snmp_info"]
2087 if info["snmp_scan_function"] and section_name not in snmp_scan_functions:
2088 snmp_scan_functions[section_name] = info["snmp_scan_function"]
2091 # This function validates that the checks which are members of a checkgroup either
2092 # all have an item or none of them does. Mixed checkgroups lead to strange exceptions when processing
2093 # the check parameters. So it is much better to catch these errors in a central place
2094 # with a clear error message.
2095 def verify_checkgroup_members():
2096 groups = checks_by_checkgroup()
2098 for group_name, check_entries in groups.items():
2099 with_item, without_item = [], []
2100 for check_plugin_name, check_info_entry in check_entries:
2101 # Trying to detect whether or not the check has an item. But this mechanism is not
2102 # 100% reliable since Check_MK appends an item to the service_description when "%s"
2103 # is not in the check's service_description template.
2104 # Maybe we need to define a new rule which enforces the developer to use the %s in
2105 # the service_description. At least for grouped checks.
2106 if "%s" in check_info_entry["service_description"]:
2107 with_item.append(check_plugin_name)
2108 else:
2109 without_item.append(check_plugin_name)
2111 if with_item and without_item:
2112 raise MKGeneralException(
2113 "Checkgroup %s has checks with and without item! At least one of "
2114 "the checks in this group needs to be changed (With item: %s, "
2115 "Without item: %s)" % (group_name, ", ".join(with_item), ", ".join(without_item)))
2118 def checks_by_checkgroup():
2119 groups = {}
2120 for check_plugin_name, check in check_info.items():
2121 group_name = check["group"]
2122 if group_name:
2123 groups.setdefault(group_name, [])
2124 groups[group_name].append((check_plugin_name, check))
2125 return groups
2128 # These caches both only hold the base names of the checks
2129 def initialize_check_type_caches():
2130 snmp_cache = cmk_base.runtime_cache.get_set("check_type_snmp")
2131 snmp_cache.update(snmp_info.keys())
2133 tcp_cache = cmk_base.runtime_cache.get_set("check_type_tcp")
2134 for check_plugin_name in check_info:
2135 section_name = cmk_base.check_utils.section_name_of(check_plugin_name)
2136 if section_name not in snmp_cache:
2137 tcp_cache.add(section_name)
2141 # .--Helpers-------------------------------------------------------------.
2142 # | _ _ _ |
2143 # | | | | | ___| |_ __ ___ _ __ ___ |
2144 # | | |_| |/ _ \ | '_ \ / _ \ '__/ __| |
2145 # | | _ | __/ | |_) | __/ | \__ \ |
2146 # | |_| |_|\___|_| .__/ \___|_| |___/ |
2147 # | |_| |
2148 # +----------------------------------------------------------------------+
2149 # | Misc check related helper functions |
2150 # '----------------------------------------------------------------------'
2153 def discoverable_tcp_checks():
2154 types = []
2155 for check_plugin_name, check in check_info.items():
2156 if cmk_base.check_utils.is_tcp_check(check_plugin_name) and check["inventory_function"]:
2157 types.append(check_plugin_name)
2158 return sorted(types)
2161 def discoverable_snmp_checks():
2162 types = []
2163 for check_plugin_name, check in check_info.items():
2164 if cmk_base.check_utils.is_snmp_check(check_plugin_name) and check["inventory_function"]:
2165 types.append(check_plugin_name)
2166 return sorted(types)
2169 # Compute parameters for a check honoring factory settings,
2170 # default settings of user in main.mk, check_parameters[] and
2171 # the values coded in autochecks (given as parameter params)
2172 def compute_check_parameters(host, checktype, item, params):
2173 if checktype not in check_info: # handle vanished checktype
2174 return None
2176 params = _update_with_default_check_parameters(checktype, params)
2177 params = _update_with_configured_check_parameters(host, checktype, item, params)
2179 return params
2182 def _update_with_default_check_parameters(checktype, params):
2183 # Handle dictionary based checks
2184 def_levels_varname = check_info[checktype].get("default_levels_variable")
2186 # Handle case where parameter is None but the type of the
2187 # default value is a dictionary. This is for example the
2188 # case if a check type has gotten parameters in a new version
2189 # but inventory of the old version left None as a parameter.
2190 # Also from now on we support that the inventory simply puts
2191 # None as a parameter. We convert that to an empty dictionary
2192 # that will be updated with the factory settings and default
2193 # levels, if possible.
2194 if params is None and def_levels_varname:
2195 fs = factory_settings.get(def_levels_varname)
2196 if isinstance(fs, dict):
2197 params = {}
2199 # Honor factory settings for dict-type checks. Merge
2200 # dict type checks with multiple matching rules
2201 if isinstance(params, dict):
2203 # Start with factory settings
2204 if def_levels_varname:
2205 new_params = factory_settings.get(def_levels_varname, {}).copy()
2206 else:
2207 new_params = {}
2209 # Merge user's default settings onto it
2210 check_context = _check_contexts[checktype]
2211 if def_levels_varname and def_levels_varname in check_context:
2212 def_levels = check_context[def_levels_varname]
2213 if isinstance(def_levels, dict):
2214 new_params.update(def_levels)
2216 # Merge params from inventory onto it
2217 new_params.update(params)
2218 params = new_params
2220 return params
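# Worked example of the merge order above (variable name and values are
# illustrative only): with factory_settings["example_default_levels"] set to
# {"levels": (80.0, 90.0)}, a user override of example_default_levels =
# {"levels": (85.0, 95.0)} in main.mk and an autocheck parameter of {}, the
# effective parameters become {"levels": (85.0, 95.0)}: factory settings
# first, then the user's default settings, then the autocheck parameters.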
2223 def _update_with_configured_check_parameters(host, checktype, item, params):
2224 descr = service_description(host, checktype, item)
2226 config_cache = get_config_cache()
2228 # Get parameters configured via checkgroup_parameters
2229 entries = _get_checkgroup_parameters(config_cache, host, checktype, item)
2231 # Get parameters configured via check_parameters
2232 entries += config_cache.service_extra_conf(host, descr, check_parameters)
2234 if entries:
2235 if _has_timespecific_params(entries):
2236 # some parameters include timespecific settings
2237 # these will be executed just before the check execution
2238 return TimespecificParamList(entries)
2240 # loop from last to first (first must have precedence)
2241 for entry in entries[::-1]:
2242 if isinstance(params, dict) and isinstance(entry, dict):
2243 params.update(entry)
2244 else:
2245 if isinstance(entry, dict):
2246 # The entry still holds a reference to the object from the rule definition.
2247 # If we don't make a deepcopy, the rule might be modified by
2248 # a follow-up params.update(...)
2249 entry = copy.deepcopy(entry)
2250 params = entry
2251 return params
2254 def _has_timespecific_params(entries):
2255 for entry in entries:
2256 if isinstance(entry, dict) and "tp_default_value" in entry:
2257 return True
2258 return False
2261 def _get_checkgroup_parameters(config_cache, host, checktype, item):
2262 checkgroup = check_info[checktype]["group"]
2263 if not checkgroup:
2264 return []
2265 rules = checkgroup_parameters.get(checkgroup)
2266 if rules is None:
2267 return []
2269 try:
2270 # checks without an item
2271 if item is None and checkgroup not in service_rule_groups:
2272 return config_cache.host_extra_conf(host, rules)
2274 # checks with an item need service-specific rules
2275 return config_cache.service_extra_conf(host, item, rules)
2276 except MKGeneralException as e:
2277 raise MKGeneralException(str(e) + " (on host %s, checktype %s)" % (host, checktype))
2280 def do_status_data_inventory_for(hostname):
2281 rules = active_checks.get('cmk_inv')
2282 if rules is None:
2283 return False
2285 # 'host_extra_conf' is already cached thus we can
2286 # use it after every check cycle.
2287 entries = get_config_cache().host_extra_conf(hostname, rules)
2289 if not entries:
2290 return False # No matching rule -> disable
2292 # Convert legacy rules to current dict format (just like the valuespec)
2293 params = {} if entries[0] is None else entries[0]
2295 return params.get('status_data_inventory', False)
2298 def do_host_label_discovery_for(hostname):
2299 rules = active_checks.get('cmk_inv')
2300 if rules is None:
2301 return True
2303 entries = get_config_cache().host_extra_conf(hostname, rules)
2305 if not entries:
2306 return True # No matching rule -> host label discovery stays enabled
2308 # Convert legacy rules to current dict format (just like the valuespec)
2309 params = {} if entries[0] is None else entries[0]
2311 return params.get("host_label_inventory", True)
2314 def filter_by_management_board(hostname,
2315 found_check_plugin_names,
2316 for_mgmt_board,
2317 for_discovery=False,
2318 for_inventory=False):
2319 """
2320 In order to decide which check is used for which data source
2321 we have to filter the found check plugins. This is done via
2322 the check_info key "management_board". There are three values
2323 with the following meanings:
2324 - MGMT_ONLY
2325 These check plugins
2326 - are only used for management board data sources,
2327 - have the prefix 'mgmt_' in their name,
2328 - have the prefix 'Management Interface:' in their service description.
2329 - If there is an equivalent host check plugin then it must be 'HOST_ONLY'.
2331 - HOST_PRECEDENCE
2332 - Default value for all check plugins.
2333 - It does not have to be declared in the check_info.
2334 - Special situation for SNMP management boards:
2335 - If a host is not an SNMP host these checks are used for
2336 the SNMP management boards.
2337 - If a host is an SNMP host these checks are used for
2338 the host itself.
2340 - HOST_ONLY
2341 These check plugins
2342 - are used for 'real' host data sources, not for host management board data sources
2343 - there is an equivalent 'MGMT_ONLY'-management board check plugin.
2344 """
2346 mgmt_only, host_precedence_snmp, host_only_snmp, host_precedence_tcp, host_only_tcp =\
2347 _get_categorized_check_plugins(found_check_plugin_names, for_inventory=for_inventory)
2349 config_cache = get_config_cache()
2350 host_config = config_cache.get_host_config(hostname)
2352 final_collection = set()
2353 if not host_config.has_management_board:
2354 if host_config.is_snmp_host:
2355 final_collection.update(host_precedence_snmp)
2356 final_collection.update(host_only_snmp)
2357 if host_config.is_agent_host:
2358 final_collection.update(host_precedence_tcp)
2359 final_collection.update(host_only_tcp)
2360 return final_collection
2362 if for_mgmt_board:
2363 final_collection.update(mgmt_only)
2364 if not host_config.is_snmp_host:
2365 final_collection.update(host_precedence_snmp)
2366 if not for_discovery:
2367 # Migration from 1.4 to 1.5:
2368 # in 1.4 TCP hosts with SNMP management boards discovered TCP and
2369 # SNMP checks, eg. uptime and snmp_uptime. During checking phase
2370 # these checks should be executed
2371 # further on.
2372 # In versions >= 1.5 there are management board specific check
2373 # plugins, eg. mgmt_snmp_uptime.
2374 # After a re-discovery Check_MK finds the uptime check plugin for
2375 # the TCP host and the mgmt_snmp_uptime check for the SNMP
2376 # management board. Moreover Check_MK eliminates 'HOST_ONLY'
2377 # checks like snmp_uptime.
2378 final_collection.update(host_only_snmp)
2380 else:
2381 if host_config.is_snmp_host:
2382 final_collection.update(host_precedence_snmp)
2383 final_collection.update(host_only_snmp)
2384 if host_config.is_agent_host:
2385 final_collection.update(host_precedence_tcp)
2386 final_collection.update(host_only_tcp)
2388 return final_collection
2391 def _get_categorized_check_plugins(check_plugin_names, for_inventory=False):
2392 if for_inventory:
2393 is_snmp_check_f = cmk_base.inventory_plugins.is_snmp_plugin
2394 plugins_info = cmk_base.inventory_plugins.inv_info
2395 else:
2396 is_snmp_check_f = cmk_base.check_utils.is_snmp_check
2397 plugins_info = check_info
2399 mgmt_only = set()
2400 host_precedence_snmp = set()
2401 host_precedence_tcp = set()
2402 host_only_snmp = set()
2403 host_only_tcp = set()
2405 for check_plugin_name in check_plugin_names:
2406 if check_plugin_name not in plugins_info:
2407 msg = "Unknown plugin file %s" % check_plugin_name
2408 if cmk.utils.debug.enabled():
2409 raise MKGeneralException(msg)
2410 else:
2411 console.verbose("%s\n" % msg)
2412 continue
2414 is_snmp_check_ = is_snmp_check_f(check_plugin_name)
2415 mgmt_board = _get_management_board_precedence(check_plugin_name, plugins_info)
2416 if mgmt_board == check_api_utils.HOST_PRECEDENCE:
2417 if is_snmp_check_:
2418 host_precedence_snmp.add(check_plugin_name)
2419 else:
2420 host_precedence_tcp.add(check_plugin_name)
2422 elif mgmt_board == check_api_utils.MGMT_ONLY:
2423 mgmt_only.add(check_plugin_name)
2425 elif mgmt_board == check_api_utils.HOST_ONLY:
2426 if is_snmp_check_:
2427 host_only_snmp.add(check_plugin_name)
2428 else:
2429 host_only_tcp.add(check_plugin_name)
2431 return mgmt_only, host_precedence_snmp, host_only_snmp,\
2432 host_precedence_tcp, host_only_tcp
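# Illustration of the categorization above, using the plugin names mentioned
# in the filter_by_management_board() docstring: "uptime" (a TCP check without
# a "management_board" key) lands in host_precedence_tcp, "snmp_uptime"
# declared as HOST_ONLY lands in host_only_snmp, and "mgmt_snmp_uptime"
# declared as MGMT_ONLY lands in mgmt_only. The exact declarations depend on
# the shipped plugins; the mapping itself follows directly from the code above.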
2435 def _get_management_board_precedence(check_plugin_name, plugins_info):
2436 mgmt_board = plugins_info[check_plugin_name].get("management_board")
2437 if mgmt_board is None:
2438 return check_api_utils.HOST_PRECEDENCE
2439 return mgmt_board
2442 cmk_base.cleanup.register_cleanup(check_api_utils.reset_hostname)
2445 # .--Host Configuration--------------------------------------------------.
2446 # | _ _ _ |
2447 # | | | | | ___ ___| |_ |
2448 # | | |_| |/ _ \/ __| __| |
2449 # | | _ | (_) \__ \ |_ |
2450 # | |_| |_|\___/|___/\__| |
2451 # | |
2452 # | ____ __ _ _ _ |
2453 # | / ___|___ _ __ / _(_) __ _ _ _ _ __ __ _| |_(_) ___ _ __ |
2454 # | | | / _ \| '_ \| |_| |/ _` | | | | '__/ _` | __| |/ _ \| '_ \ |
2455 # | | |__| (_) | | | | _| | (_| | |_| | | | (_| | |_| | (_) | | | | |
2456 # | \____\___/|_| |_|_| |_|\__, |\__,_|_| \__,_|\__|_|\___/|_| |_| |
2457 # | |___/ |
2458 # +----------------------------------------------------------------------+
2461 class HostConfig(object):
2462 def __init__(self, config_cache, hostname):
2463 # type: (ConfigCache, str) -> None
2464 super(HostConfig, self).__init__()
2465 self.hostname = hostname
2467 self._config_cache = config_cache
2469 self.alias = self._get_alias()
2470 self.parents = self._get_parents()
2472 self.is_cluster = self._is_cluster()
2473 # TODO: Rename this to self.clusters?
2474 self.part_of_clusters = self._config_cache.clusters_of(hostname)
2475 self.nodes = self._config_cache.nodes_of(hostname)
2477 # TODO: Rename self.tags to self.tag_list and self.tag_groups to self.tags
2478 self.tags = self._config_cache.tag_list_of_host(self.hostname)
2479 self.tag_groups = host_tags.get(hostname, {})
2480 self.labels = self._get_host_labels()
2481 self.label_sources = self._get_host_label_sources()
2482 self.ruleset_match_object = self._get_ruleset_match_object()
2484 # Basic types
2485 self.is_tcp_host = self._config_cache.in_binary_hostlist(hostname, tcp_hosts)
2486 self.is_snmp_host = self._config_cache.in_binary_hostlist(hostname, snmp_hosts)
2487 self.is_usewalk_host = self._config_cache.in_binary_hostlist(hostname, usewalk_hosts)
2489 if "piggyback" in self.tags:
2490 self.is_piggyback_host = True
2491 elif "no-piggyback" in self.tags:
2492 self.is_piggyback_host = False
2493 else: # Legacy automatic detection
2494 self.is_piggyback_host = self.has_piggyback_data
2496 # Agent types
2497 self.is_agent_host = self.is_tcp_host or self.is_piggyback_host
2498 self.management_protocol = management_protocol.get(hostname)
2499 self.has_management_board = self.management_protocol is not None
2501 self.is_ping_host = not self.is_snmp_host and\
2502 not self.is_agent_host and\
2503 not self.has_management_board
2505 self.is_dual_host = self.is_tcp_host and self.is_snmp_host
2506 self.is_all_agents_host = "all-agents" in self.tags
2507 self.is_all_special_agents_host = "special-agents" in self.tags
2509 # IP addresses
2510 # Whether or not the given host is configured not to be monitored via IP
2511 self.is_no_ip_host = "no-ip" in self.tags
2512 self.is_ipv6_host = "ip-v6" in self.tags
2513 # Whether or not the given host is configured to be monitored via IPv4.
2514 # This is the case when it is set to be explicit IPv4 or implicit (when
2515 # host is not an IPv6 host and not a "No IP" host)
2516 self.is_ipv4_host = "ip-v4" in self.tags or (not self.is_ipv6_host and
2517 not self.is_no_ip_host)
2519 self.is_ipv4v6_host = "ip-v6" in self.tags and "ip-v4" in self.tags
2521 # Whether or not the given host is configured to be monitored primarily via IPv6
2522 self.is_ipv6_primary = (not self.is_ipv4v6_host and self.is_ipv6_host) \
2523 or (self.is_ipv4v6_host and self._primary_ip_address_family_of() == "ipv6")
2525 def _get_ruleset_match_object(self):
2526 # type: () -> RulesetMatchObject
2527 """Construct the dictionary object that is needed to match this host to rulesets"""
2528 return RulesetMatchObject(
2529 host_name=self.hostname,
2530 host_tags=self.tag_groups,
2531 )
2533 @property
2534 def has_piggyback_data(self):
2535 if piggyback.has_piggyback_raw_data(piggyback_max_cachefile_age, self.hostname):
2536 return True
2538 from cmk_base.data_sources.abstract import has_persisted_agent_sections
2539 return has_persisted_agent_sections("piggyback", self.hostname)
2541 def _primary_ip_address_family_of(self):
2542 rules = self._config_cache.host_extra_conf(self.hostname, primary_address_family)
2543 if rules:
2544 return rules[0]
2545 return "ipv4"
2547 def _get_alias(self):
2548 # type: () -> Text
2549 aliases = self._config_cache.host_extra_conf(self.hostname, extra_host_conf.get(
2550 "alias", []))
2551 if not aliases:
2552 return self.hostname
2554 return aliases[0]
2556 # TODO: Move cluster/node parent handling to this function
2557 def _get_parents(self):
2558 # type: () -> List[str]
2559 """Use only those parents which are defined and active in all_hosts"""
2560 used_parents = []
2561 for parent_names in self._config_cache.host_extra_conf(self.hostname, parents):
2562 for parent_name in parent_names.split(","):
2563 if parent_name in self._config_cache.all_active_realhosts():
2564 used_parents.append(parent_name)
2565 return used_parents
2567 def _get_host_labels(self):
2568 """Returns the effective set of host labels from all available sources
2570 1. Discovered labels
2571 2. Ruleset "Host labels"
2572 3. Explicit labels (via host/folder config)
2574 Last one wins.
2575 """
2576 labels = {}
2577 labels.update(self._discovered_labels_of_host())
2578 labels.update(self._config_cache.host_extra_conf_merged(self.hostname, host_label_rules))
2579 labels.update(host_labels.get(self.hostname, {}))
2580 return labels
2582 def _get_host_label_sources(self):
2583 """Returns the effective set of host label keys with their source identifier instead of the value
2584 Order and merging logic is equal to _get_host_labels()"""
2585 labels = {}
2586 labels.update({k: "discovered" for k in self._discovered_labels_of_host().keys()})
2587 labels.update({k : "ruleset" \
2588 for k in self._config_cache.host_extra_conf_merged(self.hostname, host_label_rules)})
2589 labels.update({k: "explicit" for k in host_labels.get(self.hostname, {}).keys()})
2590 return labels
2592 def _discovered_labels_of_host(self):
2593 # type: () -> Dict
2594 return DiscoveredHostLabelsStore(self.hostname).load()
2596 def snmp_config(self, ipaddress):
2597 # type: (str) -> cmk_base.snmp_utils.SNMPHostConfig
2598 return cmk_base.snmp_utils.SNMPHostConfig(
2599 is_ipv6_primary=self.is_ipv6_primary,
2600 hostname=self.hostname,
2601 ipaddress=ipaddress,
2602 credentials=self._snmp_credentials(),
2603 port=self._snmp_port(),
2604 is_bulkwalk_host=self._config_cache.in_binary_hostlist(self.hostname, bulkwalk_hosts),
2605 is_snmpv2or3_without_bulkwalk_host=self._config_cache.in_binary_hostlist(
2606 self.hostname, snmpv2c_hosts),
2607 bulk_walk_size_of=self._bulk_walk_size(),
2608 timing=self._snmp_timing(),
2609 oid_range_limits=self._config_cache.host_extra_conf(self.hostname,
2610 snmp_limit_oid_range),
2611 snmpv3_contexts=self._config_cache.host_extra_conf(self.hostname, snmpv3_contexts),
2612 character_encoding=self._snmp_character_encoding(),
2613 is_usewalk_host=self.is_usewalk_host,
2614 is_inline_snmp_host=self._is_inline_snmp_host(),
2615 )
2617 def _snmp_credentials(self):
2618 """Determine SNMP credentials for a specific host
2620 If the host is found in the map snmp_communities, that community is
2621 returned. Otherwise the snmp_default_community is returned (which is
2622 preset with "public", but can be overridden in main.mk).
2623 """
2624 try:
2625 return explicit_snmp_communities[self.hostname]
2626 except KeyError:
2627 pass
2629 communities = self._config_cache.host_extra_conf(self.hostname, snmp_communities)
2630 if communities:
2631 return communities[0]
2633 # nothing configured for this host -> use default
2634 return snmp_default_community
2636 def _snmp_port(self):
2637 # type: () -> int
2638 ports = self._config_cache.host_extra_conf(self.hostname, snmp_ports)
2639 if not ports:
2640 return 161
2641 return ports[0]
2643 def _snmp_timing(self):
2644 timing = self._config_cache.host_extra_conf(self.hostname, snmp_timing)
2645 if not timing:
2646 return {}
2647 return timing[0]
2649 def _bulk_walk_size(self):
2650 bulk_sizes = self._config_cache.host_extra_conf(self.hostname, snmp_bulk_size)
2651 if not bulk_sizes:
2652 return 10
2653 return bulk_sizes[0]
2655 def _snmp_character_encoding(self):
2656 entries = self._config_cache.host_extra_conf(self.hostname, snmp_character_encodings)
2657 if not entries:
2658 return None
2659 return entries[0]
2661 def _is_inline_snmp_host(self):
2662 # TODO: Better use "inline_snmp" once we have moved the code to an own module
2663 has_inline_snmp = "netsnmp" in sys.modules
2664 return has_inline_snmp and use_inline_snmp \
2665 and not self._config_cache.in_binary_hostlist(self.hostname, non_inline_snmp_hosts)
2667 def _is_cluster(self):
2668 """Checks whether or not the given host is a cluster host
2669 all_configured_clusters() needs to be used, because this function affects
2670 the agent bakery, which needs all configured hosts instead of just the hosts
2671 of this site"""
2672 return self.hostname in self._config_cache.all_configured_clusters()
2676 # .--Configuration Cache-------------------------------------------------.
2677 # | ____ __ _ _ _ |
2678 # | / ___|___ _ __ / _(_) __ _ _ _ _ __ __ _| |_(_) ___ _ __ |
2679 # | | | / _ \| '_ \| |_| |/ _` | | | | '__/ _` | __| |/ _ \| '_ \ |
2680 # | | |__| (_) | | | | _| | (_| | |_| | | | (_| | |_| | (_) | | | | |
2681 # | \____\___/|_| |_|_| |_|\__, |\__,_|_| \__,_|\__|_|\___/|_| |_| |
2682 # | |___/ |
2683 # | ____ _ |
2684 # | / ___|__ _ ___| |__ ___ |
2685 # | | | / _` |/ __| '_ \ / _ \ |
2686 # | | |__| (_| | (__| | | | __/ |
2687 # | \____\__,_|\___|_| |_|\___| |
2688 # | |
2689 # +----------------------------------------------------------------------+
2692 class ConfigCache(object):
2693 def __init__(self):
2694 super(ConfigCache, self).__init__()
2695 self._initialize_caches()
2697 def initialize(self):
2698 self._initialize_caches()
2699 self._collect_hosttags()
2700 self._setup_clusters_nodes_cache()
2702 self._all_configured_clusters = self._get_all_configured_clusters()
2703 self._all_configured_realhosts = self._get_all_configured_realhosts()
2704 self._all_configured_hosts = self._get_all_configured_hosts()
2705 # TODO: Clean this one up?
2706 self._initialize_host_lookup()
2708 self._all_active_clusters = self._get_all_active_clusters()
2709 self._all_active_realhosts = self._get_all_active_realhosts()
2710 self._all_active_hosts = self._get_all_active_hosts()
2711 self._all_processed_hosts = self._all_active_hosts
2713 def _initialize_caches(self):
2714 self.single_host_checks = cmk_base.config_cache.get_dict("single_host_checks")
2715 self.multi_host_checks = cmk_base.config_cache.get_list("multi_host_checks")
2716 self.check_table_cache = cmk_base.config_cache.get_dict("check_tables")
2718 self._cache_is_snmp_check = cmk_base.runtime_cache.get_dict("is_snmp_check")
2719 self._cache_is_tcp_check = cmk_base.runtime_cache.get_dict("is_tcp_check")
2720 self._cache_section_name_of = {}
2722 # Host lookup
2724 # Contains all hostnames which are currently relevant for this cache
2725 # Most of the time all_processed_hosts is equal to all_active_hosts.
2726 # However, in a multiprocessing environment all_processed_hosts may
2727 # contain only a reduced set of hosts, since each process handles a subset.
2728 self._all_processed_hosts = set()
2729 self._all_configured_hosts = set()
2730 self._all_configured_clusters = set()
2731 self._all_configured_realhosts = set()
2732 self._all_active_clusters = set()
2733 self._all_active_realhosts = set()
2735 # Reference hostname -> dirname including /
2736 self._host_paths = {}
2737 # Reference dirname -> hosts in this dir including subfolders
2738 self._folder_host_lookup = {}
2739 # All used folders used for various set intersection operations
2740 self._folder_path_set = set()
2742 # Host tags
2743 self._hosttags = {}
2744 self._hosttags_without_folder = {}
2746 # Reference hosttags_without_folder -> list of hosts
2747 # Provides a list of hosts with the same hosttags, excluding the folder
2748 self._hosts_grouped_by_tags = {}
2749 # Reference hostname -> tag group reference
2750 self._host_grouped_ref = {}
2752 # Autochecks cache
2753 self._autochecks_cache = {}
2755 # Cache for all_matching_host
2756 self._all_matching_hosts_match_cache = {}
2758 # Caches for host_extra_conf
2759 self._host_extra_conf_ruleset_cache = {}
2760 self._host_extra_conf_match_cache = {}
2762 # Caches for service_extra_conf
2763 self._service_extra_conf_ruleset_cache = {}
2764 self._service_extra_conf_host_matched_ruleset_cache = {}
2765 self._service_extra_conf_match_cache = {}
2767 # Caches for in_boolean_serviceconf_list
2768 self._in_boolean_service_conf_list_ruleset_cache = {}
2769 self._in_boolean_service_conf_list_match_cache = {}
2771 # Cache for in_binary_hostlist
2772 self._in_binary_hostlist_cache = {}
2774 # Caches for nodes and clusters
2775 self._clusters_of_cache = {}
2776 self._nodes_of_cache = {}
2778 # A factor which indicates how many hosts share the same host tag configuration (excluding folders):
2779 # len(all_processed_hosts) / len(different tag combinations)
2780 # It is used to determine the best rule evaluation method.
2781 self._all_processed_hosts_similarity = 1
2783 # Keep HostConfig instances created with the current configuration cache
2784 self._host_configs = {}
2786 def get_host_config(self, hostname):
2787 # type: (str) -> HostConfig
2788 """Returns a HostConfig instance for the given host
2790 It lazily initializes the host config object and caches the objects during the lifetime
2791 of the ConfigCache."""
2792 host_config = self._host_configs.get(hostname)
2793 if host_config:
2794 return host_config
2796 host_config = self._host_configs[hostname] = HostConfig(self, hostname)
2797 return host_config
2799 def _collect_hosttags(self):
2800 for tagged_host in all_hosts + clusters.keys():
2801 parts = tagged_host.split("|")
2802 self._hosttags[parts[0]] = set(parts[1:])
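# Sketch of the expected entry format (host and tag names are examples): an
# all_hosts or clusters key like "myhost|cmk-agent|prod|/wato/folder/" is
# split at "|", so self._hosttags["myhost"] becomes the tag set
# {"cmk-agent", "prod", "/wato/folder/"}.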
2804 # TODO: check all call sites and remove this
2805 def tag_list_of_host(self, hostname):
2806 """Returns the list of all configured tags of a host. In case
2807 a host has no tags configured or is not known, it returns an
2808 empty list."""
2809 return self._hosttags.get(hostname, [])
2811 def tags_of_host(self, hostname):
2812 """Returns the dict of all configured tag groups and values of a host"""
2813 return host_tags.get(hostname, {})
2815 def tags_of_service(self, hostname, svc_desc):
2816 """Returns the dict of all configured tags of a service
2817 It takes all explicitly configured tag groups into account.
2818 """
2819 tags = {}
2820 for entry in self.service_extra_conf(hostname, svc_desc, service_tag_rules):
2821 tags.update(entry)
2822 return tags
2824 def labels_of_service(self, hostname, svc_desc):
2825 """Returns the effective set of service labels from all available sources
2827 1. Discovered labels
2828 2. Ruleset "Service labels"
2830 Last one wins.
2831 """
2832 labels = {}
2833 labels.update(self.service_extra_conf_merged(hostname, svc_desc, service_label_rules))
2834 return labels
2836 def label_sources_of_service(self, hostname, svc_desc):
2837 """Returns the effective set of service label keys with their source identifier instead of the value
2838 Order and merging logic is equal to labels_of_service()"""
2839 labels = {}
2840 labels.update({
2841 k: "ruleset"
2842 for k in self.service_extra_conf_merged(hostname, svc_desc, service_label_rules)
2843 })
2844 return labels
2846 def ruleset_match_object_of_service(self, hostname, svc_desc):
2847 # type: (str, Text) -> RulesetMatchObject
2848 """Construct the dictionary object that is needed to match this service to rulesets
2850 This is done by loading the host match object and extending it with the
2851 information of this service.
2852 """
2853 host_config = self.get_host_config(hostname)
2854 match_object = host_config.ruleset_match_object.copy()
2856 match_object.service_description = svc_desc
2858 return match_object
2860 def set_all_processed_hosts(self, all_processed_hosts):
2861 self._all_processed_hosts = set(all_processed_hosts)
2863 nodes_and_clusters = set()
2864 for hostname in self._all_processed_hosts:
2865 nodes_and_clusters.update(self._nodes_of_cache.get(hostname, []))
2866 nodes_and_clusters.update(self._clusters_of_cache.get(hostname, []))
2867 self._all_processed_hosts.update(nodes_and_clusters)
2869 # The folder host lookup includes a list of all -processed- hosts within a given
2870 # folder. Any update via set_all_processed_hosts invalidates this cache, because
2871 # the scope of relevant hosts has changed. This is -good-, since the values in this
2872 # lookup are iterated one by one later on in all_matching_hosts
2873 self._folder_host_lookup = {}
2875 self._adjust_processed_hosts_similarity()
2877 def _adjust_processed_hosts_similarity(self):
2878 """This function computes the tag similarity of the processed hosts.
2879 The result is a similarity factor, which helps finding the most performant operation
2880 for the current host set."""
2881 used_groups = set()
2882 for hostname in self._all_processed_hosts:
2883 used_groups.add(self._host_grouped_ref[hostname])
2884 self._all_processed_hosts_similarity = (
2885 1.0 * len(self._all_processed_hosts) / len(used_groups))
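# Worked example of the factor computed above: 100 processed hosts sharing
# only 4 distinct tag combinations (ignoring the folder tag) yield a
# similarity of 100 / 4 = 25.0, while 100 hosts with 100 different
# combinations yield 1.0.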
2887 def _initialize_host_lookup(self):
2888 for hostname in self._all_configured_hosts:
2889 dirname_of_host = os.path.dirname(host_paths[hostname])
2890 if dirname_of_host[-1] != "/":
2891 dirname_of_host += "/"
2892 self._host_paths[hostname] = dirname_of_host
2894 # Determine hosts within folders
2895 dirnames = [
2896 x[0][len(cmk.utils.paths.check_mk_config_dir):] + "/+"
2897 for x in os.walk(cmk.utils.paths.check_mk_config_dir)
2898 ]
2899 self._folder_path_set = set(dirnames)
2901 # Determine hosttags without folder tag
2902 for hostname in self._all_configured_hosts:
2903 tags_without_folder = set(self._hosttags[hostname])
2904 try:
2905 tags_without_folder.remove(self._host_paths[hostname])
2906 except KeyError:
2907 pass
2909 self._hosttags_without_folder[hostname] = tags_without_folder
2911 # Determine hosts with same tag setup (ignoring folder tag)
2912 for hostname in self._all_configured_hosts:
2913 group_ref = tuple(sorted(self._hosttags_without_folder[hostname]))
2914 self._hosts_grouped_by_tags.setdefault(group_ref, set()).add(hostname)
2915 self._host_grouped_ref[hostname] = group_ref
2917 def get_hosts_within_folder(self, folder_path, with_foreign_hosts):
2918 cache_id = with_foreign_hosts, folder_path
2919 if cache_id not in self._folder_host_lookup:
2920 hosts_in_folder = set()
2921 # Strip off "+"
2922 folder_path_tmp = folder_path[:-1]
2923 relevant_hosts = self._all_configured_hosts if with_foreign_hosts else self._all_processed_hosts
2924 for hostname in relevant_hosts:
2925 if self._host_paths[hostname].startswith(folder_path_tmp):
2926 hosts_in_folder.add(hostname)
2927 self._folder_host_lookup[cache_id] = hosts_in_folder
2928 return hosts_in_folder
2929 return self._folder_host_lookup[cache_id]
2931 def get_autochecks_of(self, hostname):
2932 try:
2933 return self._autochecks_cache[hostname]
2934 except KeyError:
2935 result = cmk_base.autochecks.read_autochecks_of(hostname)
2936 self._autochecks_cache[hostname] = result
2937 return result
2939 def section_name_of(self, section):
2940 try:
2941 return self._cache_section_name_of[section]
2942 except KeyError:
2943 section_name = cmk_base.check_utils.section_name_of(section)
2944 self._cache_section_name_of[section] = section_name
2945 return section_name
2947 def is_snmp_check(self, check_plugin_name):
2948 try:
2949 return self._cache_is_snmp_check[check_plugin_name]
2950 except KeyError:
2951 snmp_checks = cmk_base.runtime_cache.get_set("check_type_snmp")
2952 result = self.section_name_of(check_plugin_name) in snmp_checks
2953 self._cache_is_snmp_check[check_plugin_name] = result
2954 return result
2956 def is_tcp_check(self, check_plugin_name):
2957 try:
2958 return self._cache_is_tcp_check[check_plugin_name]
2959 except KeyError:
2960 tcp_checks = cmk_base.runtime_cache.get_set("check_type_tcp")
2961 result = self.section_name_of(check_plugin_name) in tcp_checks
2962 self._cache_is_tcp_check[check_plugin_name] = result
2963 return result
2965 def filter_hosts_with_same_tags_as_host(self, hostname, hosts):
2966 return self._hosts_grouped_by_tags[self._host_grouped_ref[hostname]].intersection(hosts)
2968 def all_matching_hosts(self, tags, hostlist, with_foreign_hosts):
2969 """Returns a set containing the names of hosts that match the given
2970 tags and hostlist conditions."""
2971 cache_id = tuple(tags), tuple(hostlist), with_foreign_hosts
2973 try:
2974 return self._all_matching_hosts_match_cache[cache_id]
2975 except KeyError:
2976 pass
2978 if with_foreign_hosts:
2979 valid_hosts = self._all_configured_hosts
2980 else:
2981 valid_hosts = self._all_processed_hosts
2983 tags_set = set(tags)
2984 tags_set_without_folder = tags_set
2985 rule_path_set = tags_set.intersection(self._folder_path_set)
2986 tags_set_without_folder = tags_set - rule_path_set
2988 if rule_path_set:
2989 # More than one dynamic folder in one rule is simply wrong..
2990 rule_path = list(rule_path_set)[0]
2991 else:
2992 rule_path = "/+"
2994 # Thin out the valid hosts further. If the rule is located in a folder
2995 # we only need the intersection of the folder's hosts and the previously determined valid_hosts
2996 valid_hosts = self.get_hosts_within_folder(rule_path,
2997 with_foreign_hosts).intersection(valid_hosts)
2999 # Contains matched hosts
3001 if tags_set_without_folder and hostlist == ALL_HOSTS:
3002 return self._match_hosts_by_tags(cache_id, valid_hosts, tags_set_without_folder)
3004 matching = set([])
3005 only_specific_hosts = not bool([x for x in hostlist if x[0] in ["@", "!", "~"]])
3007 # If no tags are specified and there are only specific hosts we already have the matches
3008 if not tags_set_without_folder and only_specific_hosts:
3009 matching = valid_hosts.intersection(hostlist)
3010 # If no tags are specified and the hostlist only include @all (all hosts)
3011 elif not tags_set_without_folder and hostlist == ALL_HOSTS:
3012 matching = valid_hosts
3013 else:
3014 # If the rule has only exact host restrictions, we can thin out the list of hosts to check
3015 if only_specific_hosts:
3016 hosts_to_check = valid_hosts.intersection(set(hostlist))
3017 else:
3018 hosts_to_check = valid_hosts
3020 for hostname in hosts_to_check:
3021 # When no tag matching is requested, do not filter by tags. Accept all hosts
3022 # and filter only by hostlist
3023 if (not tags or
3024 hosttags_match_taglist(self._hosttags[hostname], tags_set_without_folder)):
3025 if in_extraconf_hostlist(hostlist, hostname):
3026 matching.add(hostname)
3028 self._all_matching_hosts_match_cache[cache_id] = matching
3029 return matching
3031 def _match_hosts_by_tags(self, cache_id, valid_hosts, tags_set_without_folder):
3032 matching = set([])
3033 has_specific_folder_tag = sum([x[0] == "/" for x in tags_set_without_folder])
3034 negative_match_tags = set()
3035 positive_match_tags = set()
3036 for tag in tags_set_without_folder:
3037 if tag[0] == "!":
3038 negative_match_tags.add(tag[1:])
3039 else:
3040 positive_match_tags.add(tag)
3042 if has_specific_folder_tag or self._all_processed_hosts_similarity < 3:
3043 # Without shared folders
3044 for hostname in valid_hosts:
3045 if not positive_match_tags - self._hosttags[hostname]:
3046 if not negative_match_tags.intersection(self._hosttags[hostname]):
3047 matching.add(hostname)
3049 self._all_matching_hosts_match_cache[cache_id] = matching
3050 return matching
3052 # With shared folders
3053 checked_hosts = set()
3054 for hostname in valid_hosts:
3055 if hostname in checked_hosts:
3056 continue
3058 hosts_with_same_tag = self.filter_hosts_with_same_tags_as_host(hostname, valid_hosts)
3059 checked_hosts.update(hosts_with_same_tag)
3061 if not positive_match_tags - self._hosttags[hostname]:
3062 if not negative_match_tags.intersection(self._hosttags[hostname]):
3063 matching.update(hosts_with_same_tag)
3065 self._all_matching_hosts_match_cache[cache_id] = matching
3066 return matching
3068 def host_extra_conf_merged(self, hostname, conf):
3069 rule_dict = {}
3070 for rule in self.host_extra_conf(hostname, conf):
3071 for key, value in rule.items():
3072 rule_dict.setdefault(key, value)
3073 return rule_dict
3075 def host_extra_conf(self, hostname, ruleset):
3076 with_foreign_hosts = hostname not in self._all_processed_hosts
3077 cache_id = id(ruleset), with_foreign_hosts
3078 try:
3079 return self._host_extra_conf_match_cache[cache_id][hostname]
3080 except KeyError:
3081 pass
3083 try:
3084 ruleset = self._host_extra_conf_ruleset_cache[cache_id]
3085 except KeyError:
3086 ruleset = self._convert_host_ruleset(ruleset, with_foreign_hosts)
3087 self._host_extra_conf_ruleset_cache[cache_id] = ruleset
3088 new_cache = {}
3089 for value, hostname_list in ruleset:
3090 for other_hostname in hostname_list:
3091 new_cache.setdefault(other_hostname, []).append(value)
3092 self._host_extra_conf_match_cache[cache_id] = new_cache
3094 if hostname not in self._host_extra_conf_match_cache[cache_id]:
3095 return []
3097 return self._host_extra_conf_match_cache[cache_id][hostname]
3099 def _convert_host_ruleset(self, ruleset, with_foreign_hosts):
3100 new_rules = []
3101 if len(ruleset) == 1 and ruleset[0] == "":
3102 console.warning('deprecated entry [ "" ] in host configuration list')
3104 for rule in ruleset:
3105 item, tags, hostlist, rule_options = parse_host_rule(rule)
3106 if rule_options.get("disabled"):
3107 continue
3109 # Directly compute set of all matching hosts here, this
3110 # will avoid recomputation later
3111 new_rules.append((item, self.all_matching_hosts(tags, hostlist, with_foreign_hosts)))
3113 return new_rules
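# Simplified example of the legacy tuple rule format handled here (values are
# made up):
#
#     [({"timeout": 10}, ["lan"], ALL_HOSTS, {"comment": "LAN hosts"})]
#
# parse_host_rule() splits such a rule into (item, tags, hostlist,
# rule_options), and the conversion above stores (item, <set of matching host
# names>) so that host_extra_conf() can fill its per-host cache without
# re-evaluating the rule conditions.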
3115 def service_extra_conf(self, hostname, service, ruleset):
3116 """Compute outcome of a service rule set that has an item."""
3117 # When the requested host is part of the local sites configuration,
3118 # then use only the sites hosts for processing the rules
3119 with_foreign_hosts = hostname not in self._all_processed_hosts
3120 cache_id = id(ruleset), with_foreign_hosts
3122 cached_ruleset = self._service_extra_conf_ruleset_cache.get(cache_id)
3123 if cached_ruleset is None:
3124 cached_ruleset = self._convert_service_ruleset(
3125 ruleset, with_foreign_hosts=with_foreign_hosts)
3126 self._service_extra_conf_ruleset_cache[cache_id] = cached_ruleset
3128 entries = []
3130 for value, hosts, service_matchers in cached_ruleset:
3131 if hostname not in hosts:
3132 continue
3134 descr_cache_id = service_matchers, service
3136 # 20% faster without exception handling
3137 # self._profile_log("descr cache id %r" % (descr_cache_id))
3138 match = self._service_extra_conf_match_cache.get(descr_cache_id)
3139 if match is None:
3140 match = _in_servicematcher_list(service_matchers, service)
3141 self._service_extra_conf_match_cache[descr_cache_id] = match
3143 if match:
3144 entries.append(value)
3146 return entries
3148 def service_extra_conf_merged(self, hostname, service, ruleset):
3149 rule_dict = {}
3150 for rule in self.service_extra_conf(hostname, service, ruleset):
3151 for key, value in rule.items():
3152 rule_dict.setdefault(key, value)
3153 return rule_dict
3155 def _convert_service_ruleset(self, ruleset, with_foreign_hosts):
3156 new_rules = []
3157 for rule in ruleset:
3158 rule, rule_options = get_rule_options(rule)
3159 if rule_options.get("disabled"):
3160 continue
3162 num_elements = len(rule)
3163 if num_elements == 3:
3164 item, hostlist, servlist = rule
3165 tags = []
3166 elif num_elements == 4:
3167 item, tags, hostlist, servlist = rule
3168 else:
3169 raise MKGeneralException("Invalid rule '%r' in service configuration "
3170 "list: must have 3 or 4 elements" % (rule,))
3172 # Directly compute set of all matching hosts here, this
3173 # will avoid recomputation later
3174 hosts = self.all_matching_hosts(tags, hostlist, with_foreign_hosts)
3176 # And now preprocess the configured patterns in the servlist
3177 new_rules.append((item, hosts, _convert_pattern_list(servlist)))
3179 return new_rules
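# Illustrative rule shapes accepted above (values are made up):
#
#     (VALUE, HOSTLIST, SERVICE_PATTERNS)        # 3 elements, no tags
#     (VALUE, TAGS, HOSTLIST, SERVICE_PATTERNS)  # 4 elements
#
# e.g. ({"levels": (1, 2)}, ["snmp"], ALL_HOSTS, ["Interface 1$"]) becomes
# (value, <set of matching hosts>, <precompiled service matchers>).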
3181 # Compute outcome of a service rule set that just say yes/no
3182 def in_boolean_serviceconf_list(self, hostname, descr, ruleset):
3183 # When the requested host is part of the local sites configuration,
3184 # then use only the sites hosts for processing the rules
3185 with_foreign_hosts = hostname not in self._all_processed_hosts
3186 cache_id = id(ruleset), with_foreign_hosts
3187 try:
3188 ruleset = self._in_boolean_service_conf_list_ruleset_cache[cache_id]
3189 except KeyError:
3190 ruleset = self._convert_boolean_service_ruleset(ruleset, with_foreign_hosts)
3191 self._in_boolean_service_conf_list_ruleset_cache[cache_id] = ruleset
3193 for negate, hosts, service_matchers in ruleset:
3194 if hostname in hosts:
3195 cache_id = service_matchers, descr
3196 try:
3197 match = self._in_boolean_service_conf_list_match_cache[cache_id]
3198 except KeyError:
3199 match = _in_servicematcher_list(service_matchers, descr)
3200 self._in_boolean_service_conf_list_match_cache[cache_id] = match
3202 if match:
3203 return not negate
3204 return False # no match. Do not ignore
3206 def _convert_boolean_service_ruleset(self, ruleset, with_foreign_hosts):
3207 new_rules = []
3208 for rule in ruleset:
3209 entry, rule_options = get_rule_options(rule)
3210 if rule_options.get("disabled"):
3211 continue
3213 if entry[0] == NEGATE: # this entry is logically negated
3214 negate = True
3215 entry = entry[1:]
3216 else:
3217 negate = False
3219 if len(entry) == 2:
3220 hostlist, servlist = entry
3221 tags = []
3222 elif len(entry) == 3:
3223 tags, hostlist, servlist = entry
3224 else:
3225 raise MKGeneralException("Invalid entry '%r' in configuration: "
3226 "must have 2 or 3 elements" % (entry,))
3228             # Directly compute the set of all matching hosts here; this
3229             # avoids recomputation later.
3230 hosts = self.all_matching_hosts(tags, hostlist, with_foreign_hosts)
3231 new_rules.append((negate, hosts, _convert_pattern_list(servlist)))
3233 return new_rules
3235 def all_active_hosts(self):
3236 # type: () -> Set[str]
3237 """Returns a set of all active hosts"""
3238 return self._all_active_hosts
3240 def _get_all_active_hosts(self):
3241 # type: () -> Set[str]
3242 hosts = set() # type: Set[str]
3243 hosts.update(self.all_active_realhosts(), self.all_active_clusters())
3244 return hosts
3246 def all_active_realhosts(self):
3247 # type: () -> Set[str]
3248 """Returns a set of all host names to be handled by this site hosts of other sites or disabled hosts are excluded"""
3249 return self._all_active_realhosts
3251 def _get_all_active_realhosts(self):
3252 # type: () -> Set[str]
3253 return _filter_active_hosts(self, self._all_configured_realhosts)
3255 def all_configured_realhosts(self):
3256 # type: () -> Set[str]
3257 return self._all_configured_realhosts
3259 def _get_all_configured_realhosts(self):
3260 # type: () -> Set[str]
3261 """Returns a set of all host names, regardless if currently disabled or
3262 monitored on a remote site. Does not return cluster hosts."""
3263 return set(strip_tags(all_hosts))
3265 def all_configured_hosts(self):
3266 # type: () -> Set[str]
3267 return self._all_configured_hosts
3269 def _get_all_configured_hosts(self):
3270 # type: () -> Set[str]
3271 """Returns a set of all hosts, regardless if currently disabled or monitored on a remote site."""
3272 hosts = set() # type: Set[str]
3273 hosts.update(self.all_configured_realhosts(), self.all_configured_clusters())
3274 return hosts
3276 def _setup_clusters_nodes_cache(self):
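        """Build the cluster membership caches from the "clusters" configuration.

        The tag suffix of each cluster name (everything after the first '|') is
        stripped, so that clusters_of() and nodes_of() become plain dictionary
        lookups."""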
3277 for cluster, hosts in clusters.items():
3278 clustername = cluster.split('|', 1)[0]
3279 for name in hosts:
3280 self._clusters_of_cache.setdefault(name, []).append(clustername)
3281 self._nodes_of_cache[clustername] = hosts
3283 def clusters_of(self, hostname):
3284 # type: (str) -> List[str]
3285 """Returns names of cluster hosts the host is a node of"""
3286 return self._clusters_of_cache.get(hostname, [])
3288 # TODO: cleanup None case
3289 def nodes_of(self, hostname):
3290 # type: (str) -> Optional[List[str]]
3291 """Returns the nodes of a cluster. Returns None if no match.
3293 Use host_config.nodes instead of this method to get the node list"""
3294 return self._nodes_of_cache.get(hostname)
3296 def all_active_clusters(self):
3297 # type: () -> Set[str]
3298 """Returns a set of all cluster host names to be handled by this site hosts of other sites or disabled hosts are excluded"""
3299 return self._all_active_clusters
3301 def _get_all_active_clusters(self):
3302 # type: () -> Set[str]
3303 return _filter_active_hosts(self, self.all_configured_clusters())
3305 def all_configured_clusters(self):
3306 # type: () -> Set[str]
3307 """Returns a set of all cluster names
3308 Regardless if currently disabled or monitored on a remote site. Does not return normal hosts.
3310 return self._all_configured_clusters
3312 def _get_all_configured_clusters(self):
3313 # type: () -> Set[str]
3314 return set(strip_tags(clusters.keys()))
3316     # Determine whether a service (found on a physical host) is a clustered
3317     # service and - if so - return the cluster host of the service. If
3318     # not, return the hostname of the physical host.
3319 def host_of_clustered_service(self, hostname, servicedesc, part_of_clusters=None):
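        """Return the name of the host a service should be assigned to.

        Resolution order: clustered_services_mapping rules, then
        clustered_services_of (per-cluster service lists), then the old style
        clustered_services ruleset. If none of them matches, the physical host
        name is returned."""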
3320 if part_of_clusters:
3321 the_clusters = part_of_clusters
3322 else:
3323 the_clusters = self.clusters_of(hostname)
3325 if not the_clusters:
3326 return hostname
3328 cluster_mapping = self.service_extra_conf(hostname, servicedesc, clustered_services_mapping)
3329 for cluster in cluster_mapping:
3330 # Check if the host is in this cluster
3331 if cluster in the_clusters:
3332 return cluster
3334 # 1. New style: explicitly assigned services
3335 for cluster, conf in clustered_services_of.iteritems():
3336 nodes = self.nodes_of(cluster)
3337 if not nodes:
3338 raise MKGeneralException(
3339 "Invalid entry clustered_services_of['%s']: %s is not a cluster." % (cluster,
3340 cluster))
3341 if hostname in nodes and \
3342 self.in_boolean_serviceconf_list(hostname, servicedesc, conf):
3343 return cluster
3345         # 2. Old style: clustered_services assumes that each host belongs to
3346         #    exactly one cluster
3347 if self.in_boolean_serviceconf_list(hostname, servicedesc, clustered_services):
3348 return the_clusters[0]
3350 return hostname
3352 def in_binary_hostlist(self, hostname, conf):
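        """Compute the outcome of a boolean host ruleset for the given host.

        The ruleset is either a plain list of host names or a list of
        (hostlist,) / (tags, hostlist) tuples, optionally prefixed with NEGATE.
        Results are cached per (ruleset id, hostname)."""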
3353 cache = self._in_binary_hostlist_cache
3355 cache_id = id(conf), hostname
3356 try:
3357 return cache[cache_id]
3358 except KeyError:
3359 pass
3361         # If we just have a list of strings, take it as a list of hostnames
3362 if conf and isinstance(conf[0], str):
3363 result = hostname in conf
3364 cache[cache_id] = result
3365 else:
3366 for entry in conf:
3367 actual_host_tags = self.tag_list_of_host(hostname)
3368 entry, rule_options = get_rule_options(entry)
3369 if rule_options.get("disabled"):
3370 continue
3372 try:
3373 # Negation via 'NEGATE'
3374 if entry[0] == NEGATE:
3375 entry = entry[1:]
3376 negate = True
3377 else:
3378 negate = False
3379                 # The entry should be a 1-tuple or 2-tuple whose elements are
3380                 # lists of strings. A user might forget the trailing comma in a
3381                 # 1-tuple; in that case the entry is the list itself.
3382 if isinstance(entry, list):
3383 hostlist = entry
3384 tags = []
3385 else:
3386 if len(entry) == 1: # 1-Tuple with list of hosts
3387 hostlist = entry[0]
3388 tags = []
3389 else:
3390 tags, hostlist = entry
3392 if hosttags_match_taglist(actual_host_tags, tags) and \
3393 in_extraconf_hostlist(hostlist, hostname):
3394 cache[cache_id] = not negate
3395 break
3396 except:
3397 # TODO: Fix this too generic catching (+ bad error message)
3398 raise MKGeneralException("Invalid entry '%r' in host configuration list: "
3399 "must be tuple with 1 or 2 entries" % (entry,))
3400 else:
3401 cache[cache_id] = False
3403 return cache[cache_id]
3406 def get_config_cache():
3407 # type: () -> ConfigCache
3408 config_cache = cmk_base.config_cache.get_dict("config_cache")
3409 if not config_cache:
3410 config_cache["cache"] = ConfigCache()
3411 return config_cache["cache"]
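# Illustrative use of the config cache (the host and cluster names below are
# made up, not taken from any real configuration):
#
#   config_cache = get_config_cache()
#   config_cache.clusters_of("node01")    # e.g. ["mycluster"]
#   config_cache.nodes_of("mycluster")    # e.g. ["node01", "node02"]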