Removed unused code
[check_mk.git] / cmk_base / config.py
blob 2346751d4880d71297f5180593a2d813b57de3ce
1 #!/usr/bin/env python
2 # -*- encoding: utf-8; py-indent-offset: 4 -*-
3 # +------------------------------------------------------------------+
4 # | ____ _ _ __ __ _ __ |
5 # | / ___| |__ ___ ___| | __ | \/ | |/ / |
6 # | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
7 # | | |___| | | | __/ (__| < | | | | . \ |
8 # | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
9 # | |
10 # | Copyright Mathias Kettner 2014 mk@mathias-kettner.de |
11 # +------------------------------------------------------------------+
13 # This file is part of Check_MK.
14 # The official homepage is at http://mathias-kettner.de/check_mk.
16 # check_mk is free software; you can redistribute it and/or modify it
17 # under the terms of the GNU General Public License as published by
18 # the Free Software Foundation in version 2. check_mk is distributed
19 # in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
20 # out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
21 # PARTICULAR PURPOSE. See the GNU General Public License for more de-
22 # tails. You should have received a copy of the GNU General Public
23 # License along with GNU Make; see the file COPYING. If not, write
24 # to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
25 # Boston, MA 02110-1301 USA.
27 from collections import OrderedDict
28 import ast
29 import copy
30 import inspect
31 import marshal
32 import numbers
33 import os
34 import py_compile
35 import struct
36 import sys
37 from typing import Any, Callable, Dict, List, Tuple, Union, Optional # pylint: disable=unused-import
39 import six
41 import cmk.utils.debug
42 import cmk.utils.paths
43 from cmk.utils.regex import regex, is_regex
44 import cmk.utils.translations
45 import cmk.utils.rulesets.tuple_rulesets
46 import cmk.utils.store as store
47 import cmk.utils
48 from cmk.utils.exceptions import MKGeneralException, MKTerminate
50 import cmk_base
51 import cmk_base.console as console
52 import cmk_base.default_config as default_config
53 import cmk_base.check_utils
54 import cmk_base.utils
55 import cmk_base.check_api_utils as check_api_utils
56 import cmk_base.cleanup
57 import cmk_base.piggyback as piggyback
58 from cmk_base.discovered_labels import DiscoveredHostLabelsStore
60 # TODO: Prefix helper functions with "_".
62 # This is mainly needed for pylint to detect all available
63 # configuration options during static analysis. The defaults
64 # are loaded later with load_default_config() again.
65 from cmk_base.default_config import * # pylint: disable=wildcard-import,unused-wildcard-import
67 service_service_levels = [] # type: ignore
68 host_service_levels = [] # type: ignore
71 class TimespecificParamList(list):
72 pass
75 def get_variable_names():
76 """Provides the list of all known configuration variables."""
77 return [k for k in default_config.__dict__ if k[0] != "_"]
80 def get_default_config():
81 """Provides a dictionary containing the Check_MK default configuration"""
82 cfg = {}
83 for key in get_variable_names():
84 value = getattr(default_config, key)
86 if isinstance(value, (dict, list)):
87 value = copy.deepcopy(value)
89 cfg[key] = value
90 return cfg
93 def load_default_config():
94 globals().update(get_default_config())
97 def register(name, default_value):
98 """Register a new configuration variable within Check_MK base."""
99 setattr(default_config, name, default_value)
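# Illustrative sketch (not part of the original file) of how register() and
# load_default_config() interact; the variable name "my_custom_setting" is made up:
#
#     register("my_custom_setting", [])   # default becomes known to default_config
#     load_default_config()               # copies all defaults into this module's globals
#     # afterwards the global "my_custom_setting" is available with its default value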
102 def _add_check_variables_to_default_config():
103 """Add configuration variables registered by checks to config module"""
104 default_config.__dict__.update(get_check_variable_defaults())
107 def _clear_check_variables_from_default_config(variable_names):
108 """Remove previously registered check variables from the config module"""
109 for varname in variable_names:
110 try:
111 delattr(default_config, varname)
112 except AttributeError:
113 pass
116 # Load user configured values of check related configuration variables
117 # into the check module to make them available during checking.
119 # In the same step we remove the check related configuration settings from the
120 # config module because they are not needed there anymore.
122 # They are also removed from the default config (in case they were present).
123 def set_check_variables_for_checks():
124 global_dict = globals()
125 cvn = check_variable_names()
127 check_variables = {}
128 for varname in cvn:
129 check_variables[varname] = global_dict.pop(varname)
131 set_check_variables(check_variables)
132 _clear_check_variables_from_default_config(cvn)
136 # .--Read Config---------------------------------------------------------.
137 # | ____ _ ____ __ _ |
138 # | | _ \ ___ __ _ __| | / ___|___ _ __ / _(_) __ _ |
139 # | | |_) / _ \/ _` |/ _` | | | / _ \| '_ \| |_| |/ _` | |
140 # | | _ < __/ (_| | (_| | | |__| (_) | | | | _| | (_| | |
141 # | |_| \_\___|\__,_|\__,_| \____\___/|_| |_|_| |_|\__, | |
142 # | |___/ |
143 # +----------------------------------------------------------------------+
144 # | Code for reading the configuration files. |
145 # '----------------------------------------------------------------------'
148 def load(with_conf_d=True, validate_hosts=True, exclude_parents_mk=False):
149 _initialize_config()
151 vars_before_config = all_nonfunction_vars()
153 _load_config(with_conf_d, exclude_parents_mk)
154 _transform_mgmt_config_vars_from_140_to_150()
155 _initialize_derived_config_variables()
157 _perform_post_config_loading_actions()
159 if validate_hosts:
160 _verify_non_duplicate_hosts()
162 # Such validation only makes sense when all checks have been loaded
163 if all_checks_loaded():
164 verify_non_invalid_variables(vars_before_config)
165 _verify_no_deprecated_check_rulesets()
167 verify_snmp_communities_type()
170 def load_packed_config():
171 """Load the configuration for the CMK helpers of CMC
173 These files are written by PackedConfig().
175 Should have a result similar to the load() above, with the exception that the
176 check helpers only need check related config variables.
178 The validations which are performed during load() also don't need to be performed.
179 """
180 PackedConfig().load()
183 def _initialize_config():
184 _add_check_variables_to_default_config()
185 load_default_config()
188 def _perform_post_config_loading_actions():
189 """These tasks must be performed after loading the Check_MK base configuration"""
190 # First cleanup things (needed for e.g. reloading the config)
191 cmk_base.config_cache.clear_all()
193 get_config_cache().initialize()
195 # If the checks are not loaded yet, the current mode apparently does not
196 # work with the checks. In that case also don't load the static
197 # checks into the configuration.
198 if any_check_loaded():
199 add_wato_static_checks_to_checks()
200 initialize_check_caches()
201 set_check_variables_for_checks()
204 def _load_config(with_conf_d, exclude_parents_mk):
205 helper_vars = {
206 "FOLDER_PATH": None,
209 global_dict = globals()
210 global_dict.update(helper_vars)
212 for _f in _get_config_file_paths(with_conf_d):
213 # During parent scan mode we must not read in old version of parents.mk!
214 if exclude_parents_mk and _f.endswith("/parents.mk"):
215 continue
217 try:
218 _hosts_before = set(all_hosts)
219 _clusters_before = set(clusters.keys())
221 # Make the config path available as a global variable to
222 # be used within the configuration file
223 if _f.startswith(cmk.utils.paths.check_mk_config_dir + "/"):
224 _file_path = _f[len(cmk.utils.paths.check_mk_config_dir) + 1:]
225 global_dict.update({
226 "FOLDER_PATH": os.path.dirname(_file_path),
228 else:
229 global_dict.update({
230 "FOLDER_PATH": None,
233 execfile(_f, global_dict, global_dict)
235 _new_hosts = set(all_hosts).difference(_hosts_before)
236 _new_clusters = set(clusters.keys()).difference(_clusters_before)
238 set_folder_paths(_new_hosts.union(_new_clusters), _f)
239 except Exception as e:
240 if cmk.utils.debug.enabled():
241 raise
242 elif sys.stderr.isatty():
243 console.error("Cannot read in configuration file %s: %s\n", _f, e)
244 sys.exit(1)
246 # Cleanup global helper vars
247 for helper_var in helper_vars:
248 del global_dict[helper_var]
251 def _transform_mgmt_config_vars_from_140_to_150():
252 #FIXME We have to transform some configuration variables from host attributes
253 # to cmk_base configuration variables because during the migration step from
254 # 1.4.0 to 1.5.0 some config variables are not known in cmk_base. These variables
255 # are 'management_protocol' and 'management_snmp_community'.
256 # Clean this up one day!
257 for hostname, attributes in host_attributes.iteritems():
258 for name, var in [
259 ('management_protocol', management_protocol),
260 ('management_snmp_community', management_snmp_credentials),
261 ]:
262 if attributes.get(name):
263 var.setdefault(hostname, attributes[name])
266 # Create list of all files to be included during configuration loading
267 def _get_config_file_paths(with_conf_d):
268 if with_conf_d:
269 list_of_files = sorted(
270 reduce(lambda a, b: a + b,
271 [["%s/%s" % (d, f)
272 for f in fs
273 if f.endswith(".mk")]
274 for d, _unused_sb, fs in os.walk(cmk.utils.paths.check_mk_config_dir)], []),
275 cmp=cmk.utils.cmp_config_paths)
276 list_of_files = [cmk.utils.paths.main_config_file] + list_of_files
277 else:
278 list_of_files = [cmk.utils.paths.main_config_file]
280 for path in [cmk.utils.paths.final_config_file, cmk.utils.paths.local_config_file]:
281 if os.path.exists(path):
282 list_of_files.append(path)
284 return list_of_files
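# Illustrative result (paths are assumptions based on a typical site layout, not
# taken from this file): main.mk always comes first, the conf.d/*.mk files follow
# in the order defined by cmk.utils.cmp_config_paths, and final.mk / local.mk are
# appended only if they exist, e.g.
#
#     [".../etc/check_mk/main.mk",
#      ".../etc/check_mk/conf.d/wato/global.mk",
#      ".../etc/check_mk/conf.d/wato/hosts.mk",
#      ".../etc/check_mk/final.mk"]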
287 def _initialize_derived_config_variables():
288 global service_service_levels, host_service_levels
289 service_service_levels = extra_service_conf.get("_ec_sl", [])
290 host_service_levels = extra_host_conf.get("_ec_sl", [])
293 def get_derived_config_variable_names():
294 """These variables are computed from other configuration variables and not configured directly.
296 The origin variable (extra_service_conf) should not be exported to the helper config. Only
297 the service levels are needed."""
298 return set(["service_service_levels", "host_service_levels"])
301 def _verify_non_duplicate_hosts():
302 duplicates = duplicate_hosts()
303 if duplicates:
304 # TODO: Raise an exception
305 console.error("Error in configuration: duplicate hosts: %s\n", ", ".join(duplicates))
306 sys.exit(3)
309 # Add WATO-configured explicit checks to (possibly empty) checks
310 # statically defined in checks.
311 def add_wato_static_checks_to_checks():
312 global checks
314 static = []
315 for entries in static_checks.values():
316 for entry in entries:
317 entry, rule_options = get_rule_options(entry)
318 if rule_options.get("disabled"):
319 continue
321 # Parameters are optional
322 if len(entry[0]) == 2:
323 checktype, item = entry[0]
324 params = None
325 else:
326 checktype, item, params = entry[0]
327 if len(entry) == 3:
328 taglist, hostlist = entry[1:3]
329 else:
330 hostlist = entry[1]
331 taglist = []
333 # Do not process manual checks that refer to check plugins which do not exist
334 # or whose check files have not been loaded
335 try:
336 check_plugin_info = check_info[checktype]
337 except KeyError:
338 continue
340 # Make sure, that for dictionary based checks
341 # at least those keys defined in the factory
342 # settings are present in the parameters
343 if isinstance(params, dict):
344 def_levels_varname = check_plugin_info.get("default_levels_variable")
345 if def_levels_varname:
346 for key, value in factory_settings.get(def_levels_varname, {}).items():
347 if key not in params:
348 params[key] = value
350 static.append((taglist, hostlist, checktype, item, params))
352 # Note: We need to reverse the order of the static_checks. This is because
353 # users assume that earlier rules have precedence over later ones. For static
354 # checks that is important if there are two rules for a host with the same
355 # combination of check type and item. When the variable 'checks' is evaluated,
356 # *later* rules have precedence. This is not consistent with the rest, but a
357 # result of this "historic implementation".
358 static.reverse()
360 # Now prepend to checks. That gives the checks variable precedence
361 # over WATO.
362 checks = static + checks
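# Illustrative translation performed above (the rule itself is made up):
#
#     static_checks["filesystem"] = [(("df", "/", {}), [], ALL_HOSTS)]
#
# becomes the checks-style tuple ([], ALL_HOSTS, "df", "/", {...}) - with missing
# factory-setting keys filled in for dict parameters - and is prepended so that
# manually configured 'checks' entries keep precedence.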
365 def initialize_check_caches():
366 single_host_checks = cmk_base.config_cache.get_dict("single_host_checks")
367 multi_host_checks = cmk_base.config_cache.get_list("multi_host_checks")
369 for entry in checks:
370 if len(entry) == 4 and isinstance(entry[0], str):
371 single_host_checks.setdefault(entry[0], []).append(entry)
372 else:
373 multi_host_checks.append(entry)
376 def set_folder_paths(new_hosts, filename):
377 if not filename.startswith(cmk.utils.paths.check_mk_config_dir):
378 return
380 path = filename[len(cmk.utils.paths.check_mk_config_dir):]
382 for hostname in strip_tags(new_hosts):
383 host_paths[hostname] = path
386 def verify_non_invalid_variables(vars_before_config):
387 # Check for invalid configuration variables
388 vars_after_config = all_nonfunction_vars()
389 ignored_variables = set([
390 'vars_before_config', 'parts', 'seen_hostnames', 'taggedhost', 'hostname',
391 'service_service_levels', 'host_service_levels'
392 ])
394 found_invalid = 0
395 for name in vars_after_config:
396 if name not in ignored_variables and name not in vars_before_config:
397 console.error("Invalid configuration variable '%s'\n", name)
398 found_invalid += 1
400 if found_invalid:
401 console.error("--> Found %d invalid variables\n" % found_invalid)
402 console.error("If you use own helper variables, please prefix them with _.\n")
403 sys.exit(1)
406 def verify_snmp_communities_type():
407 # Special handling for certain deprecated variables
408 if isinstance(snmp_communities, dict):
409 console.error("ERROR: snmp_communities cannot be a dict any more.\n")
410 sys.exit(1)
413 def _verify_no_deprecated_check_rulesets():
414 deprecated_rulesets = [
415 ("services", "inventory_services"),
416 ("domino_tasks", "inv_domino_tasks"),
417 ("ps", "inventory_processes"),
418 ("logwatch", "logwatch_patterns"),
420 for check_plugin_name, varname in deprecated_rulesets:
421 check_context = get_check_context(check_plugin_name)
422 if check_context[varname]:
423 console.warning(
424 "Found rules for deprecated ruleset %r. These rules are not applied "
425 "anymore. In case you still need them, you need to migrate them by hand. "
426 "Otherwise you can remove them from your configuration." % varname)
429 def all_nonfunction_vars():
430 return set(
431 [name for name, value in globals().items() if name[0] != '_' and not callable(value)])
434 class PackedConfig(object):
435 """The precompiled host checks and the CMC Check_MK helpers use a
436 "precompiled" part of the Check_MK configuration during runtime.
438 a) They must not use the live config from etc/check_mk during
439 startup. They are only allowed to load the config activated by
440 the user.
442 b) They must not load the whole Check_MK config, because they only
443 need the options needed for checking.
444 """
446 # These variables are part of the Check_MK configuration, but are not needed
447 # by the Check_MK keepalive mode, so exclude them from the packed config
448 _skipped_config_variable_names = [
449 "define_contactgroups",
450 "define_hostgroups",
451 "define_servicegroups",
452 "service_contactgroups",
453 "host_contactgroups",
454 "service_groups",
455 "host_groups",
456 "contacts",
457 "timeperiods",
458 "extra_service_conf",
459 "extra_nagios_conf",
462 def __init__(self):
463 super(PackedConfig, self).__init__()
464 self._path = os.path.join(cmk.utils.paths.var_dir, "base", "precompiled_check_config.mk")
466 def save(self):
467 self._write(self._pack())
469 def _pack(self):
470 helper_config = ("#!/usr/bin/env python\n"
471 "# encoding: utf-8\n"
472 "# Created by Check_MK. Dump of the currently active configuration\n\n")
474 # These functions' purpose is to filter out hosts which are monitored on different sites
475 active_hosts = all_active_hosts()
476 active_clusters = all_active_clusters()
478 def filter_all_hosts(all_hosts_orig):
479 all_hosts_red = []
480 for host_entry in all_hosts_orig:
481 hostname = host_entry.split("|", 1)[0]
482 if hostname in active_hosts:
483 all_hosts_red.append(host_entry)
484 return all_hosts_red
486 def filter_clusters(clusters_orig):
487 clusters_red = {}
488 for cluster_entry, cluster_nodes in clusters_orig.items():
489 clustername = cluster_entry.split("|", 1)[0]
490 if clustername in active_clusters:
491 clusters_red[cluster_entry] = cluster_nodes
492 return clusters_red
494 def filter_hostname_in_dict(values):
495 values_red = {}
496 for hostname, attributes in values.items():
497 if hostname in active_hosts:
498 values_red[hostname] = attributes
499 return values_red
501 filter_var_functions = {
502 "all_hosts": filter_all_hosts,
503 "clusters": filter_clusters,
504 "host_attributes": filter_hostname_in_dict,
505 "ipaddresses": filter_hostname_in_dict,
506 "ipv6addresses": filter_hostname_in_dict,
507 "explicit_snmp_communities": filter_hostname_in_dict,
508 "hosttags": filter_hostname_in_dict
512 # Add modified Check_MK base settings
515 variable_defaults = get_default_config()
516 derived_config_variable_names = get_derived_config_variable_names()
518 global_variables = globals()
520 for varname in get_variable_names() + list(derived_config_variable_names):
521 if varname in self._skipped_config_variable_names:
522 continue
524 val = global_variables[varname]
526 if varname not in derived_config_variable_names and val == variable_defaults[varname]:
527 continue
529 if not self._packable(varname, val):
530 continue
532 if varname in filter_var_functions:
533 val = filter_var_functions[varname](val)
535 helper_config += "\n%s = %r\n" % (varname, val)
538 # Add modified check specific Check_MK base settings
541 check_variable_defaults = get_check_variable_defaults()
543 for varname, val in get_check_variables().items():
544 if val == check_variable_defaults[varname]:
545 continue
547 if not self._packable(varname, val):
548 continue
550 helper_config += "\n%s = %r\n" % (varname, val)
552 return helper_config
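# Illustrative fragment of the generated helper config (variable values made up):
#
#     #!/usr/bin/env python
#     # encoding: utf-8
#     # Created by Check_MK. Dump of the currently active configuration
#
#     all_hosts = ['web01|lan|prod']
#     ipaddresses = {'web01': '10.0.0.1'}
#
# Only variables differing from their defaults (plus the derived variables) are
# written, and host-indexed variables are filtered down to the active hosts.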
554 def _packable(self, varname, val):
555 """Checks whether or not a variable can be written to the config.mk
556 and read again from it."""
557 if isinstance(val, six.string_types + (int, bool)) or not val:
558 return True
560 try:
561 eval(repr(val))
562 return True
563 except:
564 return False
566 def _write(self, helper_config):
567 store.makedirs(os.path.dirname(self._path))
569 store.save_file(self._path + ".orig", helper_config + "\n")
571 code = compile(helper_config, '<string>', 'exec')
572 with open(self._path + ".compiled", "w") as compiled_file:
573 marshal.dump(code, compiled_file)
575 os.rename(self._path + ".compiled", self._path)
577 def load(self):
578 _initialize_config()
579 exec (marshal.load(open(self._path)), globals())
580 _perform_post_config_loading_actions()
584 # .--Host tags-----------------------------------------------------------.
585 # | _ _ _ _ |
586 # | | | | | ___ ___| |_ | |_ __ _ __ _ ___ |
587 # | | |_| |/ _ \/ __| __| | __/ _` |/ _` / __| |
588 # | | _ | (_) \__ \ |_ | || (_| | (_| \__ \ |
589 # | |_| |_|\___/|___/\__| \__\__,_|\__, |___/ |
590 # | |___/ |
591 # +----------------------------------------------------------------------+
592 # | Helper functions for dealing with host tags |
593 # '----------------------------------------------------------------------'
596 def strip_tags(tagged_hostlist):
597 cache = cmk_base.config_cache.get_dict("strip_tags")
599 cache_id = tuple(tagged_hostlist)
600 try:
601 return cache[cache_id]
602 except KeyError:
603 result = [h.split('|', 1)[0] for h in tagged_hostlist]
604 cache[cache_id] = result
605 return result
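# Illustrative example (hostnames made up): everything after the first "|" is a
# tag suffix and gets cut off.
#
#     strip_tags(["web01|lan|prod|tcp", "db01|wan"])  ->  ["web01", "db01"]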
609 # .--HostCollections-----------------------------------------------------.
610 # | _ _ _ ____ _ _ _ _ |
611 # || | | | ___ ___| |_ / ___|___ | | | ___ ___| |_(_) ___ _ __ ___ |
612 # || |_| |/ _ \/ __| __| | / _ \| | |/ _ \/ __| __| |/ _ \| '_ \/ __| |
613 # || _ | (_) \__ \ |_| |__| (_) | | | __/ (__| |_| | (_) | | | \__ \ |
614 # ||_| |_|\___/|___/\__|\____\___/|_|_|\___|\___|\__|_|\___/|_| |_|___/ |
615 # | |
616 # +----------------------------------------------------------------------+
617 # | |
618 # '----------------------------------------------------------------------'
621 # Returns a set of all active hosts
622 def all_active_hosts():
623 cache = cmk_base.config_cache.get_set("all_active_hosts")
624 if not cache.is_populated():
625 cache.update(all_active_realhosts(), all_active_clusters())
626 cache.set_populated()
627 return cache
630 # Returns a set of all host names to be handled by this site
631 # Hosts of other sites or disabled hosts are excluded
632 def all_active_realhosts():
633 active_realhosts = cmk_base.config_cache.get_set("active_realhosts")
635 if not active_realhosts.is_populated():
636 active_realhosts.update(filter_active_hosts(all_configured_realhosts()))
637 active_realhosts.set_populated()
639 return active_realhosts
642 # Returns a set of all cluster host names to be handled by
643 # this site. Hosts of other sites or disabled hosts are excluded
644 def all_active_clusters():
645 active_clusters = cmk_base.config_cache.get_set("active_clusters")
647 if not active_clusters.is_populated():
648 active_clusters.update(filter_active_hosts(all_configured_clusters()))
649 active_clusters.set_populated()
651 return active_clusters
654 # Returns a set of all hosts, regardless if currently
655 # disabled or monitored on a remote site.
656 def all_configured_hosts():
657 cache = cmk_base.config_cache.get_set("all_configured_hosts")
658 if not cache.is_populated():
659 cache.update(all_configured_realhosts(), all_configured_clusters())
660 cache.set_populated()
661 return cache
664 # Returns a set of all host names, regardless if currently
665 # disabled or monitored on a remote site. Does not return
666 # cluster hosts.
667 def all_configured_realhosts():
668 cache = cmk_base.config_cache.get_set("all_configured_realhosts")
669 if not cache.is_populated():
670 cache.update(strip_tags(all_hosts))
671 cache.set_populated()
672 return cache
675 # Returns a set of all cluster names, regardless if currently
676 # disabled or monitored on a remote site. Does not return
677 # normal hosts.
678 def all_configured_clusters():
679 cache = cmk_base.config_cache.get_set("all_configured_clusters")
680 if not cache.is_populated():
681 cache.update(strip_tags(clusters.keys()))
682 cache.set_populated()
683 return cache
686 # This function should only be used during duplicate host check! It has to work like
687 # all_active_hosts() but with the difference that duplicates are not removed.
688 def all_active_hosts_with_duplicates():
689 # Only available with CEE
690 if "shadow_hosts" in globals():
691 shadow_host_entries = shadow_hosts.keys()
692 else:
693 shadow_host_entries = []
695 return filter_active_hosts(strip_tags(all_hosts) \
696 + strip_tags(clusters.keys()) \
697 + strip_tags(shadow_host_entries), keep_duplicates=True)
700 # Returns a set of active hosts for this site
701 def filter_active_hosts(hostlist, keep_offline_hosts=False, keep_duplicates=False):
702 if only_hosts is None and distributed_wato_site is None:
703 active_hosts = hostlist
705 elif only_hosts is None:
706 active_hosts = [
707 hostname for hostname in hostlist
708 if host_is_member_of_site(hostname, distributed_wato_site)
709 ]
711 elif distributed_wato_site is None:
712 if keep_offline_hosts:
713 active_hosts = hostlist
714 else:
715 active_hosts = [
716 hostname for hostname in hostlist if in_binary_hostlist(hostname, only_hosts)
717 ]
719 else:
720 active_hosts = [
721 hostname for hostname in hostlist
722 if (keep_offline_hosts or in_binary_hostlist(hostname, only_hosts)) and
723 host_is_member_of_site(hostname, distributed_wato_site)
724 ]
726 if keep_duplicates:
727 return active_hosts
729 return set(active_hosts)
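# Rough sketch of the four cases handled above (settings shown are assumptions):
#
#     only_hosts   distributed_wato_site   kept hosts
#     None         None                    all hosts of the given list
#     None         "site1"                 hosts carrying a matching "site:site1" tag
#     <ruleset>    None                    hosts matching only_hosts (unless keep_offline_hosts)
#     <ruleset>    "site1"                 hosts matching both conditions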
732 def duplicate_hosts():
733 seen_hostnames = set([])
734 duplicates = set([])
736 for hostname in all_active_hosts_with_duplicates():
737 if hostname in seen_hostnames:
738 duplicates.add(hostname)
739 else:
740 seen_hostnames.add(hostname)
742 return sorted(list(duplicates))
745 # Returns a list of all hosts which are associated with this site,
746 # but have been removed by the "only_hosts" rule. Normally these
747 # are the hosts which have the tag "offline".
749 # This is not optimized for performance, so use in specific situations.
750 def all_offline_hosts():
751 hostlist = filter_active_hosts(
752 all_configured_realhosts().union(all_configured_clusters()), keep_offline_hosts=True)
754 return [hostname for hostname in hostlist if not in_binary_hostlist(hostname, only_hosts)]
757 def all_configured_offline_hosts():
758 hostlist = all_configured_realhosts().union(all_configured_clusters())
760 return set([hostname for hostname in hostlist if not in_binary_hostlist(hostname, only_hosts)])
764 # .--Hosts---------------------------------------------------------------.
765 # | _ _ _ |
766 # | | | | | ___ ___| |_ ___ |
767 # | | |_| |/ _ \/ __| __/ __| |
768 # | | _ | (_) \__ \ |_\__ \ |
769 # | |_| |_|\___/|___/\__|___/ |
770 # | |
771 # +----------------------------------------------------------------------+
772 # | Helper functions for dealing with hosts. |
773 # '----------------------------------------------------------------------'
776 def host_is_member_of_site(hostname, site):
777 for tag in get_config_cache().get_host_config(hostname).tags:
778 if tag.startswith("site:"):
779 return site == tag[5:]
780 # hosts without a site: tag belong to all sites
781 return True
784 def alias_of(hostname, fallback):
785 aliases = get_config_cache().host_extra_conf(hostname, extra_host_conf.get("alias", []))
786 if len(aliases) == 0:
787 if fallback:
788 return fallback
790 return hostname
792 return aliases[0]
795 def get_additional_ipaddresses_of(hostname):
796 #TODO Regarding the following configuration variables from WATO
797 # there's no inheritance, thus we use 'host_attributes'.
798 # Better would be to use cmk_base configuration variables,
799 # eg. like 'management_protocol'.
800 return (host_attributes.get(hostname, {}).get("additional_ipv4addresses", []),
801 host_attributes.get(hostname, {}).get("additional_ipv6addresses", []))
804 def parents_of(hostname):
805 par = get_config_cache().host_extra_conf(hostname, parents)
806 # Use only those parents which are defined and active in
807 # all_hosts.
808 used_parents = []
809 for p in par:
810 ps = p.split(",")
811 for pss in ps:
812 if pss in all_active_realhosts():
813 used_parents.append(pss)
814 return used_parents
817 # If host is node of one or more clusters, return a list of the cluster host names.
818 # If not, return an empty list.
819 # TODO: Replace call sites with HostConfig access and remove this
820 def clusters_of(hostname):
821 return get_config_cache().get_host_config(hostname).part_of_clusters
825 # IPv4/IPv6
829 # TODO: Replace call sites with HostConfig access and remove this
830 def is_ipv6_primary(hostname):
831 return get_config_cache().get_host_config(hostname).is_ipv6_primary
834 # TODO: Replace call sites with HostConfig access and remove this
835 def is_ipv4v6_host(hostname):
836 return get_config_cache().get_host_config(hostname).is_ipv4v6_host
839 # TODO: Replace call sites with HostConfig access and remove this
840 def is_ipv6_host(hostname):
841 return get_config_cache().get_host_config(hostname).is_ipv6_host
844 # TODO: Replace call sites with HostConfig access and remove this
845 def is_ipv4_host(hostname):
846 return get_config_cache().get_host_config(hostname).is_ipv4_host
849 # TODO: Replace call sites with HostConfig access and remove this
850 def is_no_ip_host(hostname):
851 return get_config_cache().get_host_config(hostname).is_no_ip_host
855 # Management board
859 def management_address_of(hostname):
860 attributes_of_host = host_attributes.get(hostname, {})
861 if attributes_of_host.get("management_address"):
862 return attributes_of_host["management_address"]
864 return ipaddresses.get(hostname)
867 def management_credentials_of(hostname):
868 protocol = get_config_cache().get_host_config(hostname).management_protocol
869 if protocol == "snmp":
870 credentials_variable, default_value = management_snmp_credentials, snmp_default_community
871 elif protocol == "ipmi":
872 credentials_variable, default_value = management_ipmi_credentials, None
873 elif protocol is None:
874 return None
875 else:
876 raise NotImplementedError()
878 # First try to use the explicit configuration of the host
879 # (set directly for a host or via folder inheritance in WATO)
880 try:
881 return credentials_variable[hostname]
882 except KeyError:
883 pass
885 # If a rule matches, use the first rule for the management board protocol of the host
886 rule_settings = get_config_cache().host_extra_conf(hostname, management_board_config)
887 for rule_protocol, credentials in rule_settings:
888 if rule_protocol == protocol:
889 return credentials
891 return default_value
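# Lookup order used above, illustrated with assumed values (not from this file):
#
#     1. explicit per host setting, e.g. management_snmp_credentials["switch01"]
#     2. first matching management_board_config rule whose protocol equals the
#        management protocol of the host
#     3. the protocol default: snmp_default_community for SNMP, None for IPMI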
895 # Agent communication
899 def agent_port_of(hostname):
900 ports = get_config_cache().host_extra_conf(hostname, agent_ports)
901 if len(ports) == 0:
902 return agent_port
904 return ports[0]
907 def tcp_connect_timeout_of(hostname):
908 timeouts = get_config_cache().host_extra_conf(hostname, tcp_connect_timeouts)
909 if len(timeouts) == 0:
910 return tcp_connect_timeout
912 return timeouts[0]
915 def agent_encryption_of(hostname):
916 settings = get_config_cache().host_extra_conf(hostname, agent_encryption)
917 if settings:
918 return settings[0]
920 return {'use_regular': 'disable', 'use_realtime': 'enforce'}
923 def agent_target_version(hostname):
924 agent_target_versions = get_config_cache().host_extra_conf(hostname,
925 check_mk_agent_target_versions)
926 if agent_target_versions:
927 spec = agent_target_versions[0]
928 if spec == "ignore":
929 return None
930 elif spec == "site":
931 return cmk.__version__
932 elif isinstance(spec, str):
933 # Compatibility to old value specification format (a single version string)
934 return spec
935 elif spec[0] == 'specific':
936 return spec[1]
938 return spec # return the whole spec in case of an "at least version" config
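# Possible rule values and the resulting target version (examples are made up):
#
#     "ignore"                -> None (no version enforcement)
#     "site"                  -> cmk.__version__ (version of this site)
#     "1.5.0p3"               -> "1.5.0p3" (old single-string format)
#     ("specific", "1.5.0")   -> "1.5.0"
#     any other tuple form    -> returned unchanged ("at least version" handling)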
942 # Explicit custom variables
944 def get_explicit_service_custom_variables(hostname, description):
945 try:
946 return explicit_service_custom_variables[(hostname, description)]
947 except KeyError:
948 return {}
952 # SNMP
956 # Determine SNMP community for a specific host. If the host is found
957 # in the map snmp_communities, that community is returned. Otherwise
958 # the snmp_default_community is returned (which is preset with
959 # "public", but can be overridden in main.mk).
960 def snmp_credentials_of(hostname):
961 try:
962 return explicit_snmp_communities[hostname]
963 except KeyError:
964 pass
966 communities = get_config_cache().host_extra_conf(hostname, snmp_communities)
967 if len(communities) > 0:
968 return communities[0]
970 # nothing configured for this host -> use default
971 return snmp_default_community
974 def snmp_character_encoding_of(hostname):
975 entries = get_config_cache().host_extra_conf(hostname, snmp_character_encodings)
976 if len(entries) > 0:
977 return entries[0]
980 def snmp_timing_of(hostname):
981 timing = get_config_cache().host_extra_conf(hostname, snmp_timing)
982 if len(timing) > 0:
983 return timing[0]
984 return {}
987 def snmpv3_contexts_of(hostname):
988 return get_config_cache().host_extra_conf(hostname, snmpv3_contexts)
991 def oid_range_limits_of(hostname):
992 return get_config_cache().host_extra_conf(hostname, snmp_limit_oid_range)
995 def snmp_port_of(hostname):
996 # type: (str) -> int
997 ports = get_config_cache().host_extra_conf(hostname, snmp_ports)
998 if len(ports) == 0:
999 return 161
1000 return ports[0]
1003 def is_bulkwalk_host(hostname):
1004 # type: (str) -> bool
1005 if bulkwalk_hosts:
1006 return in_binary_hostlist(hostname, bulkwalk_hosts)
1008 return False
1011 def bulk_walk_size_of(hostname):
1012 bulk_sizes = get_config_cache().host_extra_conf(hostname, snmp_bulk_size)
1013 if not bulk_sizes:
1014 return 10
1016 return bulk_sizes[0]
1019 def is_snmpv2or3_without_bulkwalk_host(hostname):
1020 return in_binary_hostlist(hostname, snmpv2c_hosts)
1023 # TODO: Replace call sites with HostConfig access and remove this
1024 def is_usewalk_host(hostname):
1025 return get_config_cache().get_host_config(hostname).is_usewalk_host
1028 def is_inline_snmp_host(hostname):
1029 # TODO: Better use "inline_snmp" once we have moved the code to its own module
1030 has_inline_snmp = "netsnmp" in sys.modules
1031 return has_inline_snmp and use_inline_snmp \
1032 and not in_binary_hostlist(hostname, non_inline_snmp_hosts)
1036 # Groups
1040 def hostgroups_of(hostname):
1041 return get_config_cache().host_extra_conf(hostname, host_groups)
1044 def contactgroups_of(hostname):
1045 cgrs = []
1047 # host_contactgroups may take single values as well as
1048 # lists as item value. Of all list entries only the first
1049 # one is used. The single-contact-groups entries are all
1050 # recognized.
1051 first_list = True
1052 for entry in get_config_cache().host_extra_conf(hostname, host_contactgroups):
1053 if isinstance(entry, list) and first_list:
1054 cgrs += entry
1055 first_list = False
1056 else:
1057 cgrs.append(entry)
1059 if monitoring_core == "nagios" and enable_rulebased_notifications:
1060 cgrs.append("check-mk-notify")
1062 return list(set(cgrs))
1066 # Misc
1070 def exit_code_spec(hostname, data_source_id=None):
1071 spec = {}
1072 specs = get_config_cache().host_extra_conf(hostname, check_mk_exit_status)
1073 for entry in specs[::-1]:
1074 spec.update(entry)
1075 return _get_exit_code_spec(spec, data_source_id)
1078 def _get_exit_code_spec(spec, data_source_id):
1079 if data_source_id is not None:
1080 try:
1081 return spec["individual"][data_source_id]
1082 except KeyError:
1083 pass
1085 try:
1086 return spec["overall"]
1087 except KeyError:
1088 pass
1090 # Old configuration format
1091 return spec
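# Illustrative check_mk_exit_status rule value ("individual"/"overall" as used
# above, the inner keys and numbers are made up):
#
#     {"overall": {"connection": 2},
#      "individual": {"piggyback": {"connection": 0}}}
#
# With data_source_id="piggyback" the individual spec wins, other data sources
# fall back to "overall", and a flat dict (old format) is returned as it is.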
1094 def check_period_of(hostname, service):
1095 periods = get_config_cache().service_extra_conf(hostname, service, check_periods)
1096 if periods:
1097 period = periods[0]
1098 if period == "24X7":
1099 return None
1101 return period
1103 return None
1106 def check_interval_of(hostname, section_name):
1107 if not cmk_base.check_utils.is_snmp_check(section_name):
1108 return # no values at all for non snmp checks
1110 # Previous to 1.5 "match" could be a check name (including subchecks) instead of
1111 # only main check names -> section names. This has been cleaned up, but we still
1112 # need to be compatible. Strip off the sub check part of "match".
1113 for match, minutes in get_config_cache().host_extra_conf(hostname, snmp_check_interval):
1114 if match is None or match.split(".")[0] == section_name:
1115 return minutes # use first match
1119 # .--Cluster-------------------------------------------------------------.
1120 # | ____ _ _ |
1121 # | / ___| |_ _ ___| |_ ___ _ __ |
1122 # | | | | | | | / __| __/ _ \ '__| |
1123 # | | |___| | |_| \__ \ || __/ | |
1124 # | \____|_|\__,_|___/\__\___|_| |
1125 # | |
1126 # +----------------------------------------------------------------------+
1127 # | Code dealing with clusters (virtual hosts that are used to deal with |
1128 # | services that can move between physical nodes). |
1129 # '----------------------------------------------------------------------'
1132 # Checks whether or not the given host is a cluster host
1133 def is_cluster(hostname):
1134 # all_configured_clusters() needs to be used, because this function affects
1135 # the agent bakery, which needs all configured hosts instead of just the hosts
1136 # of this site
1137 return hostname in all_configured_clusters()
1140 # Returns the nodes of a cluster, or None if hostname is not a cluster
1141 def nodes_of(hostname):
1142 return get_config_cache().nodes_of(hostname)
1145 # Determine whether a service (found on a physical host) is a clustered
1146 # service and - if yes - return the cluster host of the service. If
1147 # no, returns the hostname of the physical host.
1148 def host_of_clustered_service(hostname, servicedesc, part_of_clusters=None):
1149 return get_config_cache().host_of_clustered_service(
1150 hostname, servicedesc, part_of_clusters=part_of_clusters)
1154 # .--Services------------------------------------------------------------.
1155 # | ____ _ |
1156 # | / ___| ___ _ ____ _(_) ___ ___ ___ |
1157 # | \___ \ / _ \ '__\ \ / / |/ __/ _ \/ __| |
1158 # | ___) | __/ | \ V /| | (_| __/\__ \ |
1159 # | |____/ \___|_| \_/ |_|\___\___||___/ |
1160 # | |
1161 # +----------------------------------------------------------------------+
1162 # | Service related helper functions |
1163 # '----------------------------------------------------------------------'
1165 # Renaming of service descriptions while keeping backward compatibility with
1166 # existing installations.
1167 # Synchronize with htdocs/wato.py and plugins/wato/check_mk_configuration.py!
1170 # Cleanup! .. some day
1171 def _get_old_cmciii_temp_description(item):
1172 if "Temperature" in item:
1173 return False, item # old item format, no conversion
1175 parts = item.split(" ")
1176 if parts[0] == "Ambient":
1177 return False, "%s Temperature" % parts[1]
1179 elif len(parts) == 2:
1180 return False, "%s %s.Temperature" % (parts[1], parts[0])
1182 else:
1183 if parts[1] == "LCP":
1184 parts[1] = "Liquid_Cooling_Package"
1185 return False, "%s %s.%s-Temperature" % (parts[1], parts[0], parts[2])
1188 _old_service_descriptions = {
1189 "df": "fs_%s",
1190 "df_netapp": "fs_%s",
1191 "df_netapp32": "fs_%s",
1192 "esx_vsphere_datastores": "fs_%s",
1193 "hr_fs": "fs_%s",
1194 "vms_diskstat.df": "fs_%s",
1195 "zfsget": "fs_%s",
1196 "ps": "proc_%s",
1197 "ps.perf": "proc_%s",
1198 "wmic_process": "proc_%s",
1199 "services": "service_%s",
1200 "logwatch": "LOG %s",
1201 "logwatch.groups": "LOG %s",
1202 "hyperv_vm": "hyperv_vms",
1203 "ibm_svc_mdiskgrp": "MDiskGrp %s",
1204 "ibm_svc_system": "IBM SVC Info",
1205 "ibm_svc_systemstats.diskio": "IBM SVC Throughput %s Total",
1206 "ibm_svc_systemstats.iops": "IBM SVC IOPS %s Total",
1207 "ibm_svc_systemstats.disk_latency": "IBM SVC Latency %s Total",
1208 "ibm_svc_systemstats.cache": "IBM SVC Cache Total",
1209 "mknotifyd": "Notification Spooler %s",
1210 "mknotifyd.connection": "Notification Connection %s",
1211 "casa_cpu_temp": "Temperature %s",
1212 "cmciii.temp": _get_old_cmciii_temp_description,
1213 "cmciii.psm_current": "%s",
1214 "cmciii_lcp_airin": "LCP Fanunit Air IN",
1215 "cmciii_lcp_airout": "LCP Fanunit Air OUT",
1216 "cmciii_lcp_water": "LCP Fanunit Water %s",
1217 "etherbox.temp": "Sensor %s",
1218 # While using the old description, don't append the item, even when discovered
1219 # with the new check which creates an item.
1220 "liebert_bat_temp": lambda item: (False, "Battery Temp"),
1221 "nvidia.temp": "Temperature NVIDIA %s",
1222 "ups_bat_temp": "Temperature Battery %s",
1223 "innovaphone_temp": lambda item: (False, "Temperature"),
1224 "enterasys_temp": lambda item: (False, "Temperature"),
1225 "raritan_emx": "Rack %s",
1226 "raritan_pdu_inlet": "Input Phase %s",
1227 "postfix_mailq": lambda item: (False, "Postfix Queue"),
1228 "nullmailer_mailq": lambda item: (False, "Nullmailer Queue"),
1229 "barracuda_mailqueues": lambda item: (False, "Mail Queue"),
1230 "qmail_stats": lambda item: (False, "Qmail Queue"),
1231 "mssql_backup": "%s Backup",
1232 "mssql_counters.cache_hits": "%s",
1233 "mssql_counters.transactions": "%s Transactions",
1234 "mssql_counters.locks": "%s Locks",
1235 "mssql_counters.sqlstats": "%s",
1236 "mssql_counters.pageactivity": "%s Page Activity",
1237 "mssql_counters.locks_per_batch": "%s Locks per Batch",
1238 "mssql_counters.file_sizes": "%s File Sizes",
1239 "mssql_databases": "%s Database",
1240 "mssql_datafiles": "Datafile %s",
1241 "mssql_tablespaces": "%s Sizes",
1242 "mssql_transactionlogs": "Transactionlog %s",
1243 "mssql_versions": "%s Version",
1244 "mssql_blocked_sessions": lambda item: (False, "MSSQL Blocked Sessions"),
1248 def service_description(hostname, check_plugin_name, item):
1249 if check_plugin_name not in check_info:
1250 if item:
1251 return "Unimplemented check %s / %s" % (check_plugin_name, item)
1252 return "Unimplemented check %s" % check_plugin_name
1254 # use user-supplied service description, if available
1255 add_item = True
1256 descr_format = service_descriptions.get(check_plugin_name)
1257 if not descr_format:
1258 # handle renaming for backward compatibility
1259 if check_plugin_name in _old_service_descriptions and \
1260 check_plugin_name not in use_new_descriptions_for:
1262 # Can be a function to generate the old description more flexibly.
1263 old_descr = _old_service_descriptions[check_plugin_name]
1264 if callable(old_descr):
1265 add_item, descr_format = old_descr(item)
1266 else:
1267 descr_format = old_descr
1269 else:
1270 descr_format = check_info[check_plugin_name]["service_description"]
1272 if isinstance(descr_format, str):
1273 descr_format = descr_format.decode("utf-8")
1275 # Note: we strip the service description (remove spaces).
1276 # One check defines "Pages %s" as a description, but the item
1277 # can be empty in some cases. Nagios silently drops leading
1278 # and trailing spaces in the configuration file.
1279 if add_item and isinstance(item, six.string_types + (numbers.Integral,)):
1280 if "%s" not in descr_format:
1281 descr_format += " %s"
1282 descr = descr_format % (item,)
1283 else:
1284 descr = descr_format
1286 if "%s" in descr:
1287 raise MKGeneralException("Found '%%s' in service description (Host: %s, Check type: %s, Item: %s). "
1288 "Please try to rediscover the service to fix this issue." % \
1289 (hostname, check_plugin_name, item))
1291 return get_final_service_description(hostname, descr)
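# Illustrative calls (host and item are made up; no overriding service_descriptions
# entry or service translation is assumed):
#
#     service_description("web01", "df", "/var")
#         -> "fs_/var"   # old description from _old_service_descriptions, because
#                        # "df" is not listed in use_new_descriptions_for
#     service_description("web01", "unknown_plugin", None)
#         -> "Unimplemented check unknown_plugin"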
1294 _old_active_check_service_descriptions = {
1295 "http": lambda params: (params[0][1:] if params[0].startswith("^") else "HTTP %s" % params[0])
1299 def active_check_service_description(hostname, active_check_name, params):
1300 if active_check_name not in active_check_info:
1301 return "Unimplemented check %s" % active_check_name
1303 if (active_check_name in _old_active_check_service_descriptions and
1304 active_check_name not in use_new_descriptions_for):
1305 description = _old_active_check_service_descriptions[active_check_name](params)
1306 else:
1307 act_info = active_check_info[active_check_name]
1308 description = act_info["service_description"](params)
1310 description = description.replace('$HOSTNAME$', hostname)
1312 return get_final_service_description(hostname, description)
1315 def get_final_service_description(hostname, description):
1316 translations = get_service_translations(hostname)
1317 if translations:
1318 # Translate
1319 description = cmk.utils.translations.translate_service_description(
1320 translations, description)
1322 # Sanitize; Remove illegal characters from a service description
1323 description = description.strip()
1324 cache = cmk_base.config_cache.get_dict("final_service_description")
1325 try:
1326 new_description = cache[description]
1327 except KeyError:
1328 new_description = "".join(
1329 [c for c in description if c not in nagios_illegal_chars]).rstrip("\\")
1330 cache[description] = new_description
1332 return new_description
1335 def service_ignored(hostname, check_plugin_name, description):
1336 if check_plugin_name and check_plugin_name in ignored_checktypes:
1337 return True
1339 if check_plugin_name and _checktype_ignored_for_host(hostname, check_plugin_name):
1340 return True
1342 if description is not None \
1343 and get_config_cache().in_boolean_serviceconf_list(hostname, description, ignored_services):
1344 return True
1346 return False
1349 def _checktype_ignored_for_host(host, checktype):
1350 if checktype in ignored_checktypes:
1351 return True
1352 ignored = get_config_cache().host_extra_conf(host, ignored_checks)
1353 for e in ignored:
1354 if checktype == e or (isinstance(e, list) and checktype in e):
1355 return True
1356 return False
1359 # TODO: Make this use the generic "rulesets" functions
1360 # a) This function has never been configurable via WATO (see https://mathias-kettner.de/checkmk_service_dependencies.html)
1361 # b) It only affects the Nagios core - CMC does not implement service dependencies
1362 # c) This function implements a specific regex match+replace mechanism which makes it incompatible with
1363 # regular service rulesets. Therefore service_extra_conf() cannot easily be used :-/
1364 def service_depends_on(hostname, servicedesc):
1365 """Return a list of services this services depends upon"""
1366 deps = []
1367 config_cache = get_config_cache()
1368 for entry in service_dependencies:
1369 entry, rule_options = get_rule_options(entry)
1370 if rule_options.get("disabled"):
1371 continue
1373 if len(entry) == 3:
1374 depname, hostlist, patternlist = entry
1375 tags = []
1376 elif len(entry) == 4:
1377 depname, tags, hostlist, patternlist = entry
1378 else:
1379 raise MKGeneralException("Invalid entry '%r' in service dependencies: "
1380 "must have 3 or 4 entries" % entry)
1382 if hosttags_match_taglist(config_cache.tag_list_of_host(hostname), tags) and \
1383 in_extraconf_hostlist(hostlist, hostname):
1384 for pattern in patternlist:
1385 matchobject = regex(pattern).search(servicedesc)
1386 if matchobject:
1387 try:
1388 item = matchobject.groups()[-1]
1389 deps.append(depname % item)
1390 except:
1391 deps.append(depname)
1392 return deps
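# Illustrative dependency rule (names made up): with
#
#     service_dependencies = [("Interface %s link", [], ALL_HOSTS, ["Interface (.*)"])]
#
# a service "Interface eth0" on a matching host yields ["Interface eth0 link"] -
# the last regex group is substituted into the dependency name.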
1396 # .--Misc Helpers--------------------------------------------------------.
1397 # | __ __ _ _ _ _ |
1398 # | | \/ (_)___ ___ | | | | ___| |_ __ ___ _ __ ___ |
1399 # | | |\/| | / __|/ __| | |_| |/ _ \ | '_ \ / _ \ '__/ __| |
1400 # | | | | | \__ \ (__ | _ | __/ | |_) | __/ | \__ \ |
1401 # | |_| |_|_|___/\___| |_| |_|\___|_| .__/ \___|_| |___/ |
1402 # | |_| |
1403 # +----------------------------------------------------------------------+
1404 # | Different helper functions |
1405 # '----------------------------------------------------------------------'
1408 def is_cmc():
1409 """Whether or not the site is currently configured to use the Microcore."""
1410 return monitoring_core == "cmc"
1413 def decode_incoming_string(s, encoding="utf-8"):
1414 try:
1415 return s.decode(encoding)
1416 except:
1417 return s.decode(fallback_agent_output_encoding)
1420 def translate_piggyback_host(sourcehost, backedhost):
1421 translation = _get_piggyback_translations(sourcehost)
1423 # To make it possible to match umlauts we need to change the hostname
1424 # to a unicode string which can then be matched with regexes etc.
1425 # We assume the incoming name is correctly encoded in UTF-8
1426 backedhost = decode_incoming_string(backedhost)
1428 translated = cmk.utils.translations.translate_hostname(translation, backedhost)
1430 return translated.encode('utf-8') # change back to UTF-8 encoded string
1433 def _get_piggyback_translations(hostname):
1434 """Get a dict that specifies the actions to be done during the hostname translation"""
1435 rules = get_config_cache().host_extra_conf(hostname, piggyback_translation)
1436 translations = {}
1437 for rule in rules[::-1]:
1438 translations.update(rule)
1439 return translations
1442 def get_service_translations(hostname):
1443 translations_cache = cmk_base.config_cache.get_dict("service_description_translations")
1444 if hostname in translations_cache:
1445 return translations_cache[hostname]
1447 rules = get_config_cache().host_extra_conf(hostname, service_description_translation)
1448 translations = {}
1449 for rule in rules[::-1]:
1450 for k, v in rule.items():
1451 if isinstance(v, list):
1452 translations.setdefault(k, set())
1453 translations[k] |= set(v)
1454 else:
1455 translations[k] = v
1457 translations_cache[hostname] = translations
1458 return translations
1461 def prepare_check_command(command_spec, hostname, description):
1462 """Prepares a check command for execution by Check_MK.
1464 This function either accepts a string or a list of arguments as
1465 command_spec. In case a list is given it quotes the single elements. It
1466 also prepares password store entries for the command line. These entries
1467 will be completed by the executed program later to get the password from
1468 the password store.
1469 """
1470 if isinstance(command_spec, six.string_types):
1471 return command_spec
1473 if not isinstance(command_spec, list):
1474 raise NotImplementedError()
1476 passwords, formated = [], []
1477 for arg in command_spec:
1478 arg_type = type(arg)
1480 if arg_type in [int, float]:
1481 formated.append("%s" % arg)
1483 elif arg_type in [str, unicode]:
1484 formated.append(cmk_base.utils.quote_shell_string(arg))
1486 elif arg_type == tuple and len(arg) == 3:
1487 pw_ident, preformated_arg = arg[1:]
1488 try:
1489 password = stored_passwords[pw_ident]["password"]
1490 except KeyError:
1491 if hostname and description:
1492 descr = " used by service \"%s\" on host \"%s\"" % (description, hostname)
1493 elif hostname:
1494 descr = " used by host host \"%s\"" % (hostname)
1495 else:
1496 descr = ""
1498 console.warning(
1499 "The stored password \"%s\"%s does not exist (anymore)." % (pw_ident, descr))
1500 password = "%%%"
1502 pw_start_index = str(preformated_arg.index("%s"))
1503 formated.append(
1504 cmk_base.utils.quote_shell_string(preformated_arg % ("*" * len(password))))
1505 passwords.append((str(len(formated)), pw_start_index, pw_ident))
1507 else:
1508 raise MKGeneralException("Invalid argument for command line: %r" % (arg,))
1510 if passwords:
1511 formated = ["--pwstore=%s" % ",".join(["@".join(p) for p in passwords])] + formated
1513 return " ".join(formated)
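# Illustrative call (all values made up, assuming a stored password of length 5
# and single-quote shell quoting by cmk_base.utils.quote_shell_string):
#
#     prepare_check_command(["-H", "web01", ("store", "mysql", "--password=%s")],
#                           "web01", "MySQL sessions")
#
# returns something like
#
#     --pwstore=3@11@mysql '-H' 'web01' '--password=*****'
#
# i.e. the password itself is masked on the command line and the executed program
# later replaces it using the password store reference.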
1516 def get_http_proxy(http_proxy):
1517 # type: (Tuple) -> Optional[str]
1518 """Returns proxy URL to be used for HTTP requests
1520 Pass a value configured by the user using the HTTPProxyReference valuespec to this function
1521 and you will get back either a proxy URL, an empty string to enforce no proxy usage or None
1522 to use the proxy configuration from the process environment.
1523 """
1524 if not isinstance(http_proxy, tuple):
1525 return None
1527 proxy_type, value = http_proxy
1529 if proxy_type == "environment":
1530 return None
1532 if proxy_type == "global":
1533 return http_proxies.get(value, {}).get("proxy_url", None)
1535 if proxy_type == "url":
1536 return value
1538 if proxy_type == "no_proxy":
1539 return ""
1541 return None
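# Illustrative inputs and results (the "our_proxy" entry is an assumption):
#
#     ("environment", None)                      -> None (use the process environment)
#     ("no_proxy", None)                         -> ""   (enforce no proxy at all)
#     ("url", "http://proxy.example.com:3128")   -> "http://proxy.example.com:3128"
#     ("global", "our_proxy")                    -> http_proxies["our_proxy"]["proxy_url"]
#                                                   if configured, otherwise None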
1545 # .--Host matching-------------------------------------------------------.
1546 # | _ _ _ _ _ _ |
1547 # | | | | | ___ ___| |_ _ __ ___ __ _| |_ ___| |__ (_)_ __ __ _ |
1548 # | | |_| |/ _ \/ __| __| | '_ ` _ \ / _` | __/ __| '_ \| | '_ \ / _` | |
1549 # | | _ | (_) \__ \ |_ | | | | | | (_| | || (__| | | | | | | | (_| | |
1550 # | |_| |_|\___/|___/\__| |_| |_| |_|\__,_|\__\___|_| |_|_|_| |_|\__, | |
1551 # | |___/ |
1552 # +----------------------------------------------------------------------+
1553 # | Code for calculating the host condition matching of rules |
1554 # '----------------------------------------------------------------------'
1557 def all_matching_hosts(tags, hostlist, with_foreign_hosts):
1558 return get_config_cache().all_matching_hosts(tags, hostlist, with_foreign_hosts)
1561 def in_extraconf_hostlist(hostlist, hostname):
1562 """Whether or not the given host matches the hostlist.
1564 Entries in list are hostnames that must equal the hostname.
1565 Expressions beginning with ! are negated: if they match,
1566 the item is excluded from the list.
1568 Expressions beginning with ~ are treated as regular expression.
1569 Also the three special tags '@all', '@cluster', '@physical'
1570 are allowed.
1571 """
1573 # Migration help: print error if old format appears in config file
1574 # FIXME: When can this be removed?
1575 try:
1576 if hostlist[0] == "":
1577 raise MKGeneralException('Invalid empty entry [ "" ] in configuration')
1578 except IndexError:
1579 pass # Empty list, no problem.
1581 for hostentry in hostlist:
1582 if hostentry == '':
1583 raise MKGeneralException('Empty hostname in host list %r' % hostlist)
1584 negate = False
1585 use_regex = False
1586 if hostentry[0] == '@':
1587 if hostentry == '@all':
1588 return True
1589 ic = is_cluster(hostname)
1590 if hostentry == '@cluster' and ic:
1591 return True
1592 elif hostentry == '@physical' and not ic:
1593 return True
1595 # Allow negation of hostentry with prefix '!'
1596 else:
1597 if hostentry[0] == '!':
1598 hostentry = hostentry[1:]
1599 negate = True
1601 # Allow regex with prefix '~'
1602 if hostentry[0] == '~':
1603 hostentry = hostentry[1:]
1604 use_regex = True
1606 try:
1607 if not use_regex and hostname == hostentry:
1608 return not negate
1609 # Handle Regex. Note: hostname == True -> generic unknown host
1610 elif use_regex and hostname != True:
1611 if regex(hostentry).match(hostname) is not None:
1612 return not negate
1613 except MKGeneralException:
1614 if cmk.utils.debug.enabled():
1615 raise
1617 return False
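# Illustrative matches (hostnames made up):
#
#     in_extraconf_hostlist(["@all"], "anything")           -> True
#     in_extraconf_hostlist(["!web01", "~web.*"], "web01")  -> False  (negated exact match)
#     in_extraconf_hostlist(["!web01", "~web.*"], "web02")  -> True   (regex match)
#     in_extraconf_hostlist(["db01"], "db02")               -> False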
1620 def in_binary_hostlist(hostname, conf):
1621 return get_config_cache().in_binary_hostlist(hostname, conf)
1624 def parse_host_rule(rule):
1625 rule, rule_options = get_rule_options(rule)
1627 num_elements = len(rule)
1628 if num_elements == 2:
1629 item, hostlist = rule
1630 tags = []
1631 elif num_elements == 3:
1632 item, tags, hostlist = rule
1633 else:
1634 raise MKGeneralException("Invalid entry '%r' in host configuration list: must "
1635 "have 2 or 3 entries" % (rule,))
1637 return item, tags, hostlist, rule_options
1640 def get_rule_options(entry):
1641 """Get the options from a rule.
1643 Pick out the option element of a rule. Currently the options "disabled"
1644 and "comments" are being honored."""
1645 if isinstance(entry[-1], dict):
1646 return entry[:-1], entry[-1]
1648 return entry, {}
1651 def hosttags_match_taglist(hosttags, required_tags):
1652 """Check if a host fulfills the requirements of a tag list.
1654 The host must have all tags in the list, except
1655 for those negated with '!'. Those the host must *not* have!
1656 A trailing + means a prefix match."""
1657 for tag in required_tags:
1658 negate, tag = _parse_negated(tag)
1659 if tag and tag[-1] == '+':
1660 tag = tag[:-1]
1661 matches = False
1662 for t in hosttags:
1663 if t.startswith(tag):
1664 matches = True
1665 break
1667 else:
1668 matches = tag in hosttags
1670 if matches == negate:
1671 return False
1673 return True
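# Illustrative checks (tag names made up):
#
#     hosttags_match_taglist(["lan", "prod", "snmp"], ["snmp", "!wan"])  -> True
#     hosttags_match_taglist(["lan", "prod", "snmp"], ["wan"])           -> False
#     hosttags_match_taglist(["site:central"], ["site:+"])               -> True (prefix match)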
1676 def _parse_negated(pattern):
1677 # Allow negation of pattern with prefix '!'
1678 try:
1679 negate = pattern[0] == '!'
1680 if negate:
1681 pattern = pattern[1:]
1682 except IndexError:
1683 negate = False
1685 return negate, pattern
1688 # Converts a regex pattern which is used to e.g. match services within Check_MK
1689 # to a function reference to a matching function which takes one parameter to
1690 # perform the matching and returns a two item tuple where the first element
1691 # tells whether or not the pattern is negated and the second element the outcome
1692 # of the match.
1693 # This function tries to parse the pattern and return different kinds of matching
1694 # functions which can then be executed faster than just using the regex match.
1695 def _convert_pattern(pattern):
1696 def is_infix_string_search(pattern):
1697 return pattern.startswith('.*') and not is_regex(pattern[2:])
1699 def is_exact_match(pattern):
1700 return pattern[-1] == '$' and not is_regex(pattern[:-1])
1702 def is_prefix_match(pattern):
1703 return pattern[-2:] == '.*' and not is_regex(pattern[:-2])
1705 if pattern == '':
1706 return False, lambda txt: True # empty patterns match always
1708 negate, pattern = _parse_negated(pattern)
1710 if is_exact_match(pattern):
1711 # Exact string match
1712 return negate, lambda txt: pattern[:-1] == txt
1714 elif is_infix_string_search(pattern):
1715 # Using regex to search a substring within text
1716 return negate, lambda txt: pattern[2:] in txt
1718 elif is_prefix_match(pattern):
1719 # prefix match with trailing .*
1720 pattern = pattern[:-2]
1721 return negate, lambda txt: txt[:len(pattern)] == pattern
1723 elif is_regex(pattern):
1724 # Non specific regex. Use real prefix regex matching
1725 return negate, lambda txt: regex(pattern).match(txt) is not None
1727 # prefix match without any regex chars
1728 return negate, lambda txt: txt[:len(pattern)] == pattern
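# Illustrative classification of patterns (patterns made up):
#
#     ""             -> matches everything
#     "CPU load$"    -> plain equality check against "CPU load"
#     ".*temp"       -> substring search for "temp"
#     "Interface.*"  -> plain prefix comparison with "Interface"
#     "!Memory"      -> negated prefix comparison with "Memory"
#     "fs_/(a|b)"    -> real (prefix-anchored) regex match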
1731 def _convert_pattern_list(patterns):
1732 return tuple([_convert_pattern(p) for p in patterns])
1735 # Slow variant of checking whether a service is matched by a list
1736 # of regexes - used e.g. by cmk --notify
1737 def in_extraconf_servicelist(servicelist, service):
1738 return _in_servicematcher_list(_convert_pattern_list(servicelist), service)
1741 def _in_servicematcher_list(service_matchers, item):
1742 for negate, func in service_matchers:
1743 result = func(item)
1744 if result:
1745 return not negate
1747 # no match in list -> negative answer
1748 return False
1752 # .--Constants-----------------------------------------------------------.
1753 # | ____ _ _ |
1754 # | / ___|___ _ __ ___| |_ __ _ _ __ | |_ ___ |
1755 # | | | / _ \| '_ \/ __| __/ _` | '_ \| __/ __| |
1756 # | | |__| (_) | | | \__ \ || (_| | | | | |_\__ \ |
1757 # | \____\___/|_| |_|___/\__\__,_|_| |_|\__|___/ |
1758 # | |
1759 # +----------------------------------------------------------------------+
1760 # | Some constants to be used in the configuration and at other places |
1761 # '----------------------------------------------------------------------'
1763 # Convenience macros for legacy tuple-based host and service rules
1764 # TODO: Deprecate these in a gentle way
1765 PHYSICAL_HOSTS = cmk.utils.rulesets.tuple_rulesets.PHYSICAL_HOSTS
1766 CLUSTER_HOSTS = cmk.utils.rulesets.tuple_rulesets.CLUSTER_HOSTS
1767 ALL_HOSTS = cmk.utils.rulesets.tuple_rulesets.ALL_HOSTS
1768 ALL_SERVICES = cmk.utils.rulesets.tuple_rulesets.ALL_SERVICES
1769 NEGATE = cmk.utils.rulesets.tuple_rulesets.NEGATE
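# A legacy tuple-based ruleset typically refers to these constants, e.g.
# (hypothetical ruleset):
#     example_host_ruleset = [
#         ( {"loss": (10.0, 20.0)}, ["lan"], ALL_HOSTS ),
#         ( {"loss": (80.0, 100.0)}, ALL_HOSTS, {"comment": "fallback"} ),
#     ]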
1771 # TODO: Cleanup access to check_info[] -> replace it by different function calls
1772 # like for example check_exists(...)
1774 # BE AWARE: sync these global data structures with
1775 # _initialize_data_structures()
1776 # TODO: Refactor this.
1778 # The checks are loaded into this dictionary. Each check has a separate
1779 # sub-dictionary, named by the check name. It is populated with the
1780 # includes and the check itself.
1781 _check_contexts = {} # type: Dict[str, Any]
1783 # The following data structures will be filled by the checks
1784 # all known checks
1785 check_info = {} # type: Dict[str, Union[Tuple[Any], Dict[str, Any]]]
1786 # library files needed by checks
1787 check_includes = {} # type: Dict[str, List[Any]]
1788 # optional functions for parameter precompilation
1789 precompile_params = {} # type: Dict[str, Callable[[str, str, Dict[str, Any]], Any]]
1790 # dictionary-configured checks declare their default level variables here
1791 check_default_levels = {} # type: Dict[str, Any]
1792 # factory settings for dictionary-configured checks
1793 factory_settings = {} # type: Dict[str, Dict[str, Any]]
1794 # variables (names) in checks/* needed for check itself
1795 check_config_variables = [] # type: List[Any]
1796 # which OIDs to fetch for which check (for tabular information)
1797 snmp_info = {} # type: Dict[str, Union[Tuple[Any], List[Tuple[Any]]]]
1798 # SNMP autodetection
1799 snmp_scan_functions = {} # type: Dict[str, Callable[[Callable[[str], str]], bool]]
1800 # definitions of active "legacy" checks
1801 active_check_info = {} # type: Dict[str, Dict[str, Any]]
1802 special_agent_info = {
1803 } # type: Dict[str, Callable[[Dict[str, Any], str, str], Union[str, List[str]]]]
1805 # Names of variables registered in the check files. This is used to
1806 # keep track of the variables needed by each file. Those variables are then
1807 # (if available) read from the config and applied to the checks module after
1808 # reading in the configuration of the user.
1809 _check_variables = {} # type: Dict[str, List[Any]]
1810 # keeps the default values of all the check variables
1811 _check_variable_defaults = {} # type: Dict[str, Any]
1812 _all_checks_loaded = False
1814 # workaround: set of check-groups that are to be treated as service-checks even if
1815 # the item is None
1816 service_rule_groups = set(["temperature"])
1819 # .--Loading-------------------------------------------------------------.
1820 # | _ _ _ |
1821 # | | | ___ __ _ __| (_)_ __ __ _ |
1822 # | | | / _ \ / _` |/ _` | | '_ \ / _` | |
1823 # | | |__| (_) | (_| | (_| | | | | | (_| | |
1824 # | |_____\___/ \__,_|\__,_|_|_| |_|\__, | |
1825 # | |___/ |
1826 # +----------------------------------------------------------------------+
1827 # | Loading of check plugins |
1828 # '----------------------------------------------------------------------'
1831 def load_all_checks(get_check_api_context):
1832 """Load all checks and includes"""
1833 global _all_checks_loaded
1835 _initialize_data_structures()
1836 filelist = get_plugin_paths(cmk.utils.paths.local_checks_dir, cmk.utils.paths.checks_dir)
1837 load_checks(get_check_api_context, filelist)
1839 _all_checks_loaded = True
1842 def _initialize_data_structures():
1843 """Initialize some data structures which are populated while loading the checks"""
1844 global _all_checks_loaded
1845 _all_checks_loaded = False
1847 _check_variables.clear()
1848 _check_variable_defaults.clear()
1850 _check_contexts.clear()
1851 check_info.clear()
1852 check_includes.clear()
1853 precompile_params.clear()
1854 check_default_levels.clear()
1855 factory_settings.clear()
1856 del check_config_variables[:]
1857 snmp_info.clear()
1858 snmp_scan_functions.clear()
1859 active_check_info.clear()
1860 special_agent_info.clear()
1863 def get_plugin_paths(*dirs):
1864 filelist = []
1865 for directory in dirs:
1866 filelist += _plugin_pathnames_in_directory(directory)
1867 return filelist
1870 # Now read in all checks. Note: this is done *before* reading the
1871 # configuration, because checks define variables with default
1872 # values; users can override those variables in their configuration.
1873 # If a check or check .include file is found both in local/ and in the
1874 # normal structure, then only the file in local/ must be read!
1875 def load_checks(get_check_api_context, filelist):
1876 cmk_global_vars = set(get_variable_names())
1878 loaded_files = set()
1880 for f in filelist:
1881 if f[0] == "." or f[-1] == "~":
1882 continue # ignore editor backup / temp files
1884 file_name = os.path.basename(f)
1885 if file_name in loaded_files:
1886 continue # skip already loaded files (e.g. from local)
1888 try:
1889 check_context = new_check_context(get_check_api_context)
1891 known_vars = check_context.keys()
1892 known_checks = check_info.keys()
1893 known_active_checks = active_check_info.keys()
1895 load_check_includes(f, check_context)
1897 load_precompiled_plugin(f, check_context)
1898 loaded_files.add(file_name)
1900 except MKTerminate:
1901 raise
1903 except Exception as e:
1904 console.error("Error in plugin file %s: %s\n", f, e)
1905 if cmk.utils.debug.enabled():
1906 raise
1907 else:
1908 continue
1910 new_checks = set(check_info.keys()).difference(known_checks)
1911 new_active_checks = set(active_check_info.keys()).difference(known_active_checks)
1913 # Now store the check context for all checks found in this file
1914 for check_plugin_name in new_checks:
1915 _check_contexts[check_plugin_name] = check_context
1917 for check_plugin_name in new_active_checks:
1918 _check_contexts[check_plugin_name] = check_context
1920 # Collect all variables that the check file did introduce compared to the
1921 # default check context
1922 new_check_vars = {}
1923 for varname in set(check_context.keys()).difference(known_vars):
1924 new_check_vars[varname] = check_context[varname]
1926 # The default_levels_variable of check_info also declares use of a global
1927 # variable. Register it here for this context.
1928 for check_plugin_name in new_checks:
1929 # The check_info is not converted yet (convert_check_info()). This means we need
1930 # to deal with old style tuple configured checks
1931 if isinstance(check_info[check_plugin_name], tuple):
1932 default_levels_varname = check_default_levels.get(check_plugin_name)
1933 else:
1934 default_levels_varname = check_info[check_plugin_name].get(
1935 "default_levels_variable")
1937 if default_levels_varname:
1938 # Add the initial configuration to the check context to have a consistent state
1939 check_context[default_levels_varname] = factory_settings.get(
1940 default_levels_varname, {})
1941 new_check_vars[default_levels_varname] = check_context[default_levels_varname]
1943 # Save the check variables so that e.g. after config loading the configured
1944 # values can be added to the check contexts
1945 for varname, value in new_check_vars.items():
1946 # Do not allow checks to override Check_MK builtin global variables. Silently
1947 # skip them here. The variables will only be locally available to the checks.
1948 if varname in cmk_global_vars:
1949 continue
1951 if varname.startswith("_"):
1952 continue
1954 if inspect.isfunction(value) or inspect.ismodule(value):
1955 continue
1957 _check_variable_defaults[varname] = value
1959 # Keep track of which variable needs to be set to which context
1960 context_ident_list = _check_variables.setdefault(varname, [])
1961 context_ident_list += new_checks
1962 context_ident_list += new_active_checks
1964 # Now convert check_info to new format.
1965 convert_check_info()
1966 verify_checkgroup_members()
1967 initialize_check_type_caches()
1970 def all_checks_loaded():
1971 """Whether or not all(!) checks have been loaded into the current process"""
1972 return _all_checks_loaded
1975 def any_check_loaded():
1976 """Whether or not some checks have been loaded into the current process"""
1977 return bool(_check_contexts)
1980 # Constructs a new check context dictionary. It contains the whole check API.
1981 def new_check_context(get_check_api_context):
1982 # Add the data structures where the checks register with Check_MK
1983 context = {
1984 "check_info": check_info,
1985 "check_includes": check_includes,
1986 "precompile_params": precompile_params,
1987 "check_default_levels": check_default_levels,
1988 "factory_settings": factory_settings,
1989 "check_config_variables": check_config_variables,
1990 "snmp_info": snmp_info,
1991 "snmp_scan_functions": snmp_scan_functions,
1992 "active_check_info": active_check_info,
1993 "special_agent_info": special_agent_info,
1994 }
1995 # NOTE: For better separation it would be better to copy the values, but
1996 # this might consume too much memory, so we simply reference them.
1997 context.update(get_check_api_context())
1998 return context
2001 # Load the definitions of the required include files for this check
2002 # Working with imports when specifying the includes would be much cleaner,
2003 # sure. But we need to deal with the current check API.
2004 def load_check_includes(check_file_path, check_context):
2005 for include_file_name in cached_includes_of_plugin(check_file_path):
2006 include_file_path = check_include_file_path(include_file_name)
2007 try:
2008 load_precompiled_plugin(include_file_path, check_context)
2009 except MKTerminate:
2010 raise
2012 except Exception as e:
2013 console.error("Error in check include file %s: %s\n", include_file_path, e)
2014 if cmk.utils.debug.enabled():
2015 raise
2016 else:
2017 continue
2020 def check_include_file_path(include_file_name):
2021 local_path = os.path.join(cmk.utils.paths.local_checks_dir, include_file_name)
2022 if os.path.exists(local_path):
2023 return local_path
2024 return os.path.join(cmk.utils.paths.checks_dir, include_file_name)
2027 def cached_includes_of_plugin(check_file_path):
2028 cache_file_path = _include_cache_file_path(check_file_path)
2029 try:
2030 return _get_cached_check_includes(check_file_path, cache_file_path)
2031 except OSError:
2032 pass # No usable cache. Recompute the includes below.
2034 includes = includes_of_plugin(check_file_path)
2035 _write_check_include_cache(cache_file_path, includes)
2036 return includes
2039 def _get_cached_check_includes(check_file_path, cache_file_path):
2040 check_stat = os.stat(check_file_path)
2041 cache_stat = os.stat(cache_file_path)
2043 if check_stat.st_mtime >= cache_stat.st_mtime:
2044 raise OSError("Cache is too old")
2046 # There are no includes (just the newline at the end)
2047 if cache_stat.st_size == 1:
2048 return [] # No includes
2050 # store.save_file() creates an empty file for locking (in case it does not exist yet).
2051 # Skip loading the file.
2052 # Note: When raising here this process will also write the file. This means it
2053 # will write it another time after it was written by the other process. This
2054 # could be optimized. Since the whole caching here is a temporary(tm) solution,
2055 # we leave it as it is.
2056 if cache_stat.st_size == 0:
2057 raise OSError("Cache generation in progress (file is locked)")
2059 x = open(cache_file_path).read().strip()
2060 if not x:
2061 return [] # Shouldn't happen. Empty files are handled above
2062 return x.split("|")
2065 def _write_check_include_cache(cache_file_path, includes):
2066 store.makedirs(os.path.dirname(cache_file_path))
2067 store.save_file(cache_file_path, "%s\n" % "|".join(includes))
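# The resulting cache file simply contains the include names joined by "|",
# e.g. (hypothetical content) "df.include|size_trend.include\n". A file holding
# only the trailing newline means the plugin has no includes, which is what the
# st_size == 1 shortcut in _get_cached_check_includes() relies on.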
2070 def _include_cache_file_path(path):
2071 is_local = path.startswith(cmk.utils.paths.local_checks_dir)
2072 return os.path.join(cmk.utils.paths.include_cache_dir, "local" if is_local else "builtin",
2073 os.path.basename(path))
2076 # Parse the check file without executing the code to find the check include
2077 # files the check uses. The following statements are extracted:
2078 # check_info[...] = { "includes": [...] }
2079 # inv_info[...] = { "includes": [...] }
2080 # check_includes[...] = [...]
2081 def includes_of_plugin(check_file_path):
2082 include_names = OrderedDict()
2084 def _load_from_check_info(node):
2085 if not isinstance(node.value, ast.Dict):
2086 return
2088 for key, val in zip(node.value.keys, node.value.values):
2089 if key.s == "includes":
2090 if isinstance(val, ast.List):
2091 for element in val.elts:
2092 include_names[element.s] = True
2093 else:
2094 raise MKGeneralException("Includes must be a list of include file names, "
2095 "found '%s'" % type(val))
2097 def _load_from_check_includes(node):
2098 if isinstance(node.value, ast.List):
2099 for element in node.value.elts:
2100 include_names[element.s] = True
2102 tree = ast.parse(open(check_file_path).read())
2103 for child in ast.iter_child_nodes(tree):
2104 if not isinstance(child, ast.Assign):
2105 continue # We only care about top level assigns
2107 # Filter out assignments to check_info dictionary
2108 for target in child.targets:
2109 if isinstance(target, ast.Subscript) and isinstance(target.value, ast.Name):
2110 if target.value.id in ["check_info", "inv_info"]:
2111 _load_from_check_info(child)
2112 elif target.value.id == "check_includes":
2113 _load_from_check_includes(child)
2115 return include_names.keys()
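# Example of the extraction above: for a (hypothetical) check file containing
#     check_info["foo"] = {
#         "service_description": "FOO",
#         "includes": ["bar.include"],
#     }
#     check_includes["foo"] = ["baz.include"]
# includes_of_plugin() returns ["bar.include", "baz.include"].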
2118 def _plugin_pathnames_in_directory(path):
2119 if path and os.path.exists(path):
2120 return sorted([
2121 path + "/" + f
2122 for f in os.listdir(path)
2123 if not f.startswith(".") and not f.endswith(".include")
2124 ])
2125 return []
2128 def load_precompiled_plugin(path, check_context):
2129 """Loads the given check or check include plugin into the given
2130 check context.
2132 To improve loading speed the files are not read directly. The files are
2133 byte-code compiled first, in case this has not been done yet. If there is
2134 already a compiled file that is newer than the source file, then the
2135 precompiled file is loaded."""
2137 precompiled_path = _precompiled_plugin_path(path)
2139 if not _is_plugin_precompiled(path, precompiled_path):
2140 console.vverbose("Precompile %s to %s\n" % (path, precompiled_path))
2141 store.makedirs(os.path.dirname(precompiled_path))
2142 py_compile.compile(path, precompiled_path, doraise=True)
2144 exec (marshal.loads(open(precompiled_path, "rb").read()[8:]), check_context)
2147 def _is_plugin_precompiled(path, precompiled_path):
2148 if not os.path.exists(precompiled_path):
2149 return False
2151 # Check precompiled file header
2152 f = open(precompiled_path, "rb")
2154 file_magic = f.read(4)
2155 if file_magic != py_compile.MAGIC:
2156 return False
2158 try:
2159 origin_file_mtime = struct.unpack("I", f.read(4))[0]
2160 except struct.error:
2161 return False
2163 if long(os.stat(path).st_mtime) != origin_file_mtime:
2164 return False
2166 return True
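# Background for the header check above: a CPython 2 .pyc file starts with a
# 4 byte magic number followed by a 4 byte source mtime; the marshalled code
# object begins at offset 8, which is why load_precompiled_plugin() skips the
# first 8 bytes before calling marshal.loads().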
2169 def _precompiled_plugin_path(path):
2170 is_local = path.startswith(cmk.utils.paths.local_checks_dir)
2171 return os.path.join(cmk.utils.paths.precompiled_checks_dir, "local" if is_local else "builtin",
2172 os.path.basename(path))
2175 def check_variable_names():
2176 return _check_variables.keys()
2179 def get_check_variable_defaults():
2180 """Returns the check variable default settings. These are the settings right
2181 after loading the checks."""
2182 return _check_variable_defaults
2185 def set_check_variables(check_variables):
2186 """Update the check related config variables in the relevant check contexts"""
2187 for varname, value in check_variables.items():
2188 for context_ident in _check_variables[varname]:
2189 _check_contexts[context_ident][varname] = value
2192 def get_check_variables():
2193 """Returns the currently effective check variable settings
2195 Since the variables are only stored in the individual check contexts and not stored
2196 in a central place, this function needs to collect the values from the check contexts.
2197 We assume a single variable has the same value in all relevant contexts, which means
2198 that it is enough to get the variable from the first context."""
2199 check_config = {}
2200 for varname, context_ident_list in _check_variables.iteritems():
2201 check_config[varname] = _check_contexts[context_ident_list[0]][varname]
2202 return check_config
2205 def get_check_context(check_plugin_name):
2206 """Returns the context dictionary of the given check plugin"""
2207 return _check_contexts[check_plugin_name]
2210 # FIXME: Clear / unset all legacy variables to prevent confusion in other code trying to
2211 # use the legacy variables which are not set by newer checks.
2212 def convert_check_info():
2213 check_info_defaults = {
2214 "check_function": None,
2215 "inventory_function": None,
2216 "parse_function": None,
2217 "group": None,
2218 "snmp_info": None,
2219 "snmp_scan_function": None,
2220 "handle_empty_info": False,
2221 "handle_real_time_checks": False,
2222 "default_levels_variable": None,
2223 "node_info": False,
2224 "extra_sections": [],
2225 "service_description": None,
2226 "has_perfdata": False,
2227 "management_board": None,
2228 }
2230 for check_plugin_name, info in check_info.items():
2231 section_name = cmk_base.check_utils.section_name_of(check_plugin_name)
2233 if not isinstance(info, dict):
2234 # Convert check declaration from old style to new API
2235 check_function, descr, has_perfdata, inventory_function = info
2237 scan_function = snmp_scan_functions.get(check_plugin_name,
2238 snmp_scan_functions.get(section_name))
2240 check_info[check_plugin_name] = {
2241 "check_function": check_function,
2242 "service_description": descr,
2243 "has_perfdata": bool(has_perfdata),
2244 "inventory_function": inventory_function,
2245 # Insert check name as group if no group is being defined
2246 "group": check_plugin_name,
2247 "snmp_info": snmp_info.get(check_plugin_name),
2248 # Sometimes the scan function is assigned to the check_plugin_name
2249 # rather than to the base name.
2250 "snmp_scan_function": scan_function,
2251 "handle_empty_info": False,
2252 "handle_real_time_checks": False,
2253 "default_levels_variable": check_default_levels.get(check_plugin_name),
2254 "node_info": False,
2255 "parse_function": None,
2256 "extra_sections": [],
2257 "management_board": None,
2258 }
2259 else:
2260 # Ensure that there are only the known keys set. Is meant to detect typos etc.
2261 for key in info.keys():
2262 if key != "includes" and key not in check_info_defaults:
2263 raise MKGeneralException(
2264 "The check '%s' declares an unexpected key '%s' in 'check_info'." %
2265 (check_plugin_name, key))
2267 # The check already uses the new API. Make sure that all keys are present and
2268 # move extra check-specific information into file-specific variables (see the includes handling below).
2269 for key, val in check_info_defaults.items():
2270 info.setdefault(key, val)
2272 # Include files are related to the check file (= the section_name),
2273 # not to the (sub-)check. So we keep them in check_includes.
2274 check_includes.setdefault(section_name, [])
2275 check_includes[section_name] += info.get("includes", [])
2277 # Make sure that setting for node_info of check and subcheck matches
2278 for check_plugin_name, info in check_info.iteritems():
2279 if "." in check_plugin_name:
2280 section_name = cmk_base.check_utils.section_name_of(check_plugin_name)
2281 if section_name not in check_info:
2282 if info["node_info"]:
2283 raise MKGeneralException(
2284 "Invalid check implementation: node_info for %s is "
2285 "True, but base check %s not defined" % (check_plugin_name, section_name))
2287 elif check_info[section_name]["node_info"] != info["node_info"]:
2288 raise MKGeneralException(
2289 "Invalid check implementation: node_info for %s "
2290 "and %s are different." % ((section_name, check_plugin_name)))
2292 # Now gather snmp_info and snmp_scan_function back into the
2293 # original dictionaries. Note: this information is tied to an "agent section",
2294 # not to a check. Several checks may use the same SNMP info and scan function.
2295 for check_plugin_name, info in check_info.iteritems():
2296 section_name = cmk_base.check_utils.section_name_of(check_plugin_name)
2297 if info["snmp_info"] and section_name not in snmp_info:
2298 snmp_info[section_name] = info["snmp_info"]
2300 if info["snmp_scan_function"] and section_name not in snmp_scan_functions:
2301 snmp_scan_functions[section_name] = info["snmp_scan_function"]
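# Illustration of the conversion above (hypothetical legacy declaration):
#     check_info["foo"] = (check_foo, "FOO %s", 1, inventory_foo)
# is turned into the dict form with "check_function", "service_description",
# "has_perfdata" and "inventory_function" set and defaults for all other keys.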
2304 # This function validates that within each checkgroup either all member checks have
2305 # an item or none of them has one. Mixed checkgroups lead to strange exceptions when
2306 # processing the check parameters. So it is much better to catch these errors in a
2307 # central place with a clear error message.
2308 def verify_checkgroup_members():
2309 groups = checks_by_checkgroup()
2311 for group_name, check_entries in groups.items():
2312 with_item, without_item = [], []
2313 for check_plugin_name, check_info_entry in check_entries:
2314 # Trying to detect whether or not the check has an item. But this mechanism is not
2315 # 100% reliable since Check_MK appends an item to the service_description when "%s"
2316 # is not in the check's service_description template.
2317 # Maybe we need to define a new rule which forces developers to use the %s in
2318 # the service_description. At least for grouped checks.
2319 if "%s" in check_info_entry["service_description"]:
2320 with_item.append(check_plugin_name)
2321 else:
2322 without_item.append(check_plugin_name)
2324 if with_item and without_item:
2325 raise MKGeneralException(
2326 "Checkgroup %s has checks with and without item! At least one of "
2327 "the checks in this group needs to be changed (With item: %s, "
2328 "Without item: %s)" % (group_name, ", ".join(with_item), ", ".join(without_item)))
2331 def checks_by_checkgroup():
2332 groups = {}
2333 for check_plugin_name, check in check_info.items():
2334 group_name = check["group"]
2335 if group_name:
2336 groups.setdefault(group_name, [])
2337 groups[group_name].append((check_plugin_name, check))
2338 return groups
2341 # These caches both only hold the base names of the checks
2342 def initialize_check_type_caches():
2343 snmp_cache = cmk_base.runtime_cache.get_set("check_type_snmp")
2344 snmp_cache.update(snmp_info.keys())
2346 tcp_cache = cmk_base.runtime_cache.get_set("check_type_tcp")
2347 for check_plugin_name in check_info:
2348 section_name = cmk_base.check_utils.section_name_of(check_plugin_name)
2349 if section_name not in snmp_cache:
2350 tcp_cache.add(section_name)
2354 # .--Helpers-------------------------------------------------------------.
2355 # | _ _ _ |
2356 # | | | | | ___| |_ __ ___ _ __ ___ |
2357 # | | |_| |/ _ \ | '_ \ / _ \ '__/ __| |
2358 # | | _ | __/ | |_) | __/ | \__ \ |
2359 # | |_| |_|\___|_| .__/ \___|_| |___/ |
2360 # | |_| |
2361 # +----------------------------------------------------------------------+
2362 # | Misc check related helper functions |
2363 # '----------------------------------------------------------------------'
2366 def discoverable_tcp_checks():
2367 types = []
2368 for check_plugin_name, check in check_info.items():
2369 if cmk_base.check_utils.is_tcp_check(check_plugin_name) and check["inventory_function"]:
2370 types.append(check_plugin_name)
2371 return sorted(types)
2374 def discoverable_snmp_checks():
2375 types = []
2376 for check_plugin_name, check in check_info.items():
2377 if cmk_base.check_utils.is_snmp_check(check_plugin_name) and check["inventory_function"]:
2378 types.append(check_plugin_name)
2379 return sorted(types)
2382 # Compute parameters for a check honoring factory settings,
2383 # default settings of the user in main.mk, check_parameters[] and
2384 # the values coded in autochecks (given as parameter params)
2385 def compute_check_parameters(host, checktype, item, params):
2386 if checktype not in check_info: # handle vanished checktype
2387 return None
2389 params = _update_with_default_check_parameters(checktype, params)
2390 params = _update_with_configured_check_parameters(host, checktype, item, params)
2392 return params
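# Resulting precedence, lowest to highest, for a hypothetical dict-based check
# "foo" with default_levels_variable "foo_default_levels":
#     factory_settings["foo_default_levels"]    # shipped factory settings
#     foo_default_levels set in main.mk         # user's default settings
#     params discovered into the autochecks     # merged onto the defaults
#     checkgroup_parameters / check_parameters  # configured rules, first match wins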
2395 def _update_with_default_check_parameters(checktype, params):
2396 # Handle dictionary based checks
2397 def_levels_varname = check_info[checktype].get("default_levels_variable")
2399 # Handle case where parameter is None but the type of the
2400 # default value is a dictionary. This is for example the
2401 # case if a check type has gotten parameters in a new version
2402 # but inventory of the old version left None as a parameter.
2403 # Also from now on we support that the inventory simply puts
2404 # None as a parameter. We convert that to an empty dictionary
2405 # that will be updated with the factory settings and default
2406 # levels, if possible.
2407 if params is None and def_levels_varname:
2408 fs = factory_settings.get(def_levels_varname)
2409 if isinstance(fs, dict):
2410 params = {}
2412 # Honor factory settings for dict-type checks. Merge
2413 # dict type checks with multiple matching rules
2414 if isinstance(params, dict):
2416 # Start with factory settings
2417 if def_levels_varname:
2418 new_params = factory_settings.get(def_levels_varname, {}).copy()
2419 else:
2420 new_params = {}
2422 # Merge user's default settings onto it
2423 check_context = _check_contexts[checktype]
2424 if def_levels_varname and def_levels_varname in check_context:
2425 def_levels = check_context[def_levels_varname]
2426 if isinstance(def_levels, dict):
2427 new_params.update(def_levels)
2429 # Merge params from inventory onto it
2430 new_params.update(params)
2431 params = new_params
2433 return params
2436 def _update_with_configured_check_parameters(host, checktype, item, params):
2437 descr = service_description(host, checktype, item)
2439 config_cache = get_config_cache()
2441 # Get parameters configured via checkgroup_parameters
2442 entries = _get_checkgroup_parameters(config_cache, host, checktype, item)
2444 # Get parameters configured via check_parameters
2445 entries += config_cache.service_extra_conf(host, descr, check_parameters)
2447 if entries:
2448 if _has_timespecific_params(entries):
2449 # some parameters include timespecific settings
2450 # these will be executed just before the check execution
2451 return TimespecificParamList(entries)
2453 # loop from last to first (first must have precedence)
2454 for entry in entries[::-1]:
2455 if isinstance(params, dict) and isinstance(entry, dict):
2456 params.update(entry)
2457 else:
2458 if isinstance(entry, dict):
2459 # The entry still holds a reference to the object from the rule.
2460 # If we don't make a deepcopy the rule might be modified by
2461 # a follow-up params.update(...)
2462 entry = copy.deepcopy(entry)
2463 params = entry
2464 return params
2467 def _has_timespecific_params(entries):
2468 for entry in entries:
2469 if isinstance(entry, dict) and "tp_default_value" in entry:
2470 return True
2471 return False
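# A timespecific parameter entry (hypothetical shape) may look like
#     {"tp_default_value": {"levels": (80.0, 90.0)},
#      "tp_values": [("workhours", {"levels": (95.0, 99.0)})]}
# Such entries are not merged here but wrapped in a TimespecificParamList and
# resolved shortly before check execution.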
2474 def _get_checkgroup_parameters(config_cache, host, checktype, item):
2475 checkgroup = check_info[checktype]["group"]
2476 if not checkgroup:
2477 return []
2478 rules = checkgroup_parameters.get(checkgroup)
2479 if rules is None:
2480 return []
2482 try:
2483 # checks without an item
2484 if item is None and checkgroup not in service_rule_groups:
2485 return config_cache.host_extra_conf(host, rules)
2487 # checks with an item need service-specific rules
2488 return config_cache.service_extra_conf(host, item, rules)
2489 except MKGeneralException as e:
2490 raise MKGeneralException(str(e) + " (on host %s, checktype %s)" % (host, checktype))
2493 def do_status_data_inventory_for(hostname):
2494 rules = active_checks.get('cmk_inv')
2495 if rules is None:
2496 return False
2498 # 'host_extra_conf' is already cached thus we can
2499 # use it after every check cycle.
2500 entries = get_config_cache().host_extra_conf(hostname, rules)
2502 if not entries:
2503 return False # No matching rule -> disable
2505 # Convert legacy rules to current dict format (just like the valuespec)
2506 params = {} if entries[0] is None else entries[0]
2508 return params.get('status_data_inventory', False)
2511 def do_host_label_discovery_for(hostname):
2512 rules = active_checks.get('cmk_inv')
2513 if rules is None:
2514 return True
2516 entries = get_config_cache().host_extra_conf(hostname, rules)
2518 if not entries:
2519 return True # No matching rule -> host label discovery stays enabled
2521 # Convert legacy rules to current dict format (just like the valuespec)
2522 params = {} if entries[0] is None else entries[0]
2524 return params.get("host_label_inventory", True)
2527 def filter_by_management_board(hostname,
2528 found_check_plugin_names,
2529 for_mgmt_board,
2530 for_discovery=False,
2531 for_inventory=False):
2532 """
2533 In order to decide which check is used for which data source
2534 we have to filter the found check plugins. This is done via
2535 the check_info key "management_board". There are three values
2536 with the following meanings:
2537 - MGMT_ONLY
2538 These check plugins
2539 - are only used for management board data sources,
2540 - have the prefix 'mgmt_' in their name,
2541 - have the prefix 'Management Interface:' in their service description.
2542 - If there is an equivalent host check plugin then it must be 'HOST_ONLY'.
2544 - HOST_PRECEDENCE
2545 - Default value for all check plugins.
2546 - It does not have to be declared in the check_info.
2547 - Special situation for SNMP management boards:
2548 - If a host is not an SNMP host these checks are used for
2549 the SNMP management boards.
2550 - If a host is an SNMP host these checks are used for
2551 the host itself.
2553 - HOST_ONLY
2554 These check plugins
2555 - are used for 'real' host data sources, not for host management board data sources
2556 - there is an equivalent 'MGMT_ONLY'-management board check plugin.
2557 """
2559 mgmt_only, host_precedence_snmp, host_only_snmp, host_precedence_tcp, host_only_tcp =\
2560 _get_categorized_check_plugins(found_check_plugin_names, for_inventory=for_inventory)
2562 config_cache = get_config_cache()
2563 host_config = config_cache.get_host_config(hostname)
2565 final_collection = set()
2566 if not host_config.has_management_board:
2567 if host_config.is_snmp_host:
2568 final_collection.update(host_precedence_snmp)
2569 final_collection.update(host_only_snmp)
2570 if host_config.is_agent_host:
2571 final_collection.update(host_precedence_tcp)
2572 final_collection.update(host_only_tcp)
2573 return final_collection
2575 if for_mgmt_board:
2576 final_collection.update(mgmt_only)
2577 if not host_config.is_snmp_host:
2578 final_collection.update(host_precedence_snmp)
2579 if not for_discovery:
2580 # Migration from 1.4 to 1.5:
2581 # In 1.4 TCP hosts with SNMP management boards discovered TCP and
2582 # SNMP checks, e.g. uptime and snmp_uptime. During the checking phase
2583 # these checks should still be executed.
2585 # In versions >= 1.5 there are management board specific check
2586 # plugins, e.g. mgmt_snmp_uptime.
2587 # After a re-discovery Check_MK finds the uptime check plugin for
2588 # the TCP host and the mgmt_snmp_uptime check for the SNMP
2589 # management board. Moreover Check_MK eliminates 'HOST_ONLY'
2590 # checks like snmp_uptime.
2591 final_collection.update(host_only_snmp)
2593 else:
2594 if host_config.is_snmp_host:
2595 final_collection.update(host_precedence_snmp)
2596 final_collection.update(host_only_snmp)
2597 if host_config.is_agent_host:
2598 final_collection.update(host_precedence_tcp)
2599 final_collection.update(host_only_tcp)
2601 return final_collection
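# Example of the filtering above (hypothetical plugin set): for a TCP host with
# an SNMP management board and the found plugins {"uptime", "snmp_uptime",
# "mgmt_snmp_uptime"}, the host data source keeps "uptime" (HOST_PRECEDENCE,
# TCP). During discovery the management board data source gets only
# "mgmt_snmp_uptime" (MGMT_ONLY); during checking it additionally keeps
# "snmp_uptime" because of the 1.4 -> 1.5 migration handling above.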
2604 def _get_categorized_check_plugins(check_plugin_names, for_inventory=False):
2605 if for_inventory:
2606 is_snmp_check_f = cmk_base.inventory_plugins.is_snmp_plugin
2607 plugins_info = cmk_base.inventory_plugins.inv_info
2608 else:
2609 is_snmp_check_f = cmk_base.check_utils.is_snmp_check
2610 plugins_info = check_info
2612 mgmt_only = set()
2613 host_precedence_snmp = set()
2614 host_precedence_tcp = set()
2615 host_only_snmp = set()
2616 host_only_tcp = set()
2618 for check_plugin_name in check_plugin_names:
2619 if check_plugin_name not in plugins_info:
2620 msg = "Unknown plugin file %s" % check_plugin_name
2621 if cmk.utils.debug.enabled():
2622 raise MKGeneralException(msg)
2623 else:
2624 console.verbose("%s\n" % msg)
2625 continue
2627 is_snmp_check_ = is_snmp_check_f(check_plugin_name)
2628 mgmt_board = _get_management_board_precedence(check_plugin_name, plugins_info)
2629 if mgmt_board == check_api_utils.HOST_PRECEDENCE:
2630 if is_snmp_check_:
2631 host_precedence_snmp.add(check_plugin_name)
2632 else:
2633 host_precedence_tcp.add(check_plugin_name)
2635 elif mgmt_board == check_api_utils.MGMT_ONLY:
2636 mgmt_only.add(check_plugin_name)
2638 elif mgmt_board == check_api_utils.HOST_ONLY:
2639 if is_snmp_check_:
2640 host_only_snmp.add(check_plugin_name)
2641 else:
2642 host_only_tcp.add(check_plugin_name)
2644 return mgmt_only, host_precedence_snmp, host_only_snmp,\
2645 host_precedence_tcp, host_only_tcp
2648 def _get_management_board_precedence(check_plugin_name, plugins_info):
2649 mgmt_board = plugins_info[check_plugin_name].get("management_board")
2650 if mgmt_board is None:
2651 return check_api_utils.HOST_PRECEDENCE
2652 return mgmt_board
2655 cmk_base.cleanup.register_cleanup(check_api_utils.reset_hostname)
2658 # .--Host Configuration--------------------------------------------------.
2659 # | _ _ _ |
2660 # | | | | | ___ ___| |_ |
2661 # | | |_| |/ _ \/ __| __| |
2662 # | | _ | (_) \__ \ |_ |
2663 # | |_| |_|\___/|___/\__| |
2664 # | |
2665 # | ____ __ _ _ _ |
2666 # | / ___|___ _ __ / _(_) __ _ _ _ _ __ __ _| |_(_) ___ _ __ |
2667 # | | | / _ \| '_ \| |_| |/ _` | | | | '__/ _` | __| |/ _ \| '_ \ |
2668 # | | |__| (_) | | | | _| | (_| | |_| | | | (_| | |_| | (_) | | | | |
2669 # | \____\___/|_| |_|_| |_|\__, |\__,_|_| \__,_|\__|_|\___/|_| |_| |
2670 # | |___/ |
2671 # +----------------------------------------------------------------------+
2674 class HostConfig(object):
2675 def __init__(self, config_cache, hostname):
2676 # type: (ConfigCache, str) -> None
2677 super(HostConfig, self).__init__()
2678 self.hostname = hostname
2680 self._config_cache = config_cache
2682 self.is_cluster = is_cluster(hostname)
2683 self.part_of_clusters = self._config_cache.clusters_of(hostname)
2685 # TODO: Rename self.tags to self.tag_list and self.tag_groups to self.tags
2686 self.tags = self._config_cache.tag_list_of_host(self.hostname)
2687 self.tag_groups = host_tags.get(hostname, {})
2688 self.labels = self._get_host_labels()
2689 self.label_sources = self._get_host_label_sources()
2691 # Basic types
2692 self.is_tcp_host = self._config_cache.in_binary_hostlist(hostname, tcp_hosts)
2693 self.is_snmp_host = self._config_cache.in_binary_hostlist(hostname, snmp_hosts)
2694 self.is_usewalk_host = self._config_cache.in_binary_hostlist(hostname, usewalk_hosts)
2696 if "piggyback" in self.tags:
2697 self.is_piggyback_host = True
2698 elif "no-piggyback" in self.tags:
2699 self.is_piggyback_host = False
2700 else: # Legacy automatic detection
2701 self.is_piggyback_host = self.has_piggyback_data
2703 # Agent types
2704 self.is_agent_host = self.is_tcp_host or self.is_piggyback_host
2705 self.management_protocol = management_protocol.get(hostname)
2706 self.has_management_board = self.management_protocol is not None
2708 self.is_ping_host = not self.is_snmp_host and\
2709 not self.is_agent_host and\
2710 not self.has_management_board
2712 self.is_dual_host = self.is_tcp_host and self.is_snmp_host
2713 self.is_all_agents_host = "all-agents" in self.tags
2714 self.is_all_special_agents_host = "special-agents" in self.tags
2716 # IP addresses
2717 # Whether or not the given host is configured not to be monitored via IP
2718 self.is_no_ip_host = "no-ip" in self.tags
2719 self.is_ipv6_host = "ip-v6" in self.tags
2720 # Whether or not the given host is configured to be monitored via IPv4.
2721 # This is the case when it is set to be explicit IPv4 or implicit (when
2722 # host is not an IPv6 host and not a "No IP" host)
2723 self.is_ipv4_host = "ip-v4" in self.tags or (not self.is_ipv6_host and
2724 not self.is_no_ip_host)
2726 self.is_ipv4v6_host = "ip-v6" in self.tags and "ip-v4" in self.tags
2728 # Whether or not the given host is configured to be monitored primarily via IPv6
2729 self.is_ipv6_primary = (not self.is_ipv4v6_host and self.is_ipv6_host) \
2730 or (self.is_ipv4v6_host and self._primary_ip_address_family_of() == "ipv6")
2732 @property
2733 def has_piggyback_data(self):
2734 if piggyback.has_piggyback_raw_data(piggyback_max_cachefile_age, self.hostname):
2735 return True
2737 from cmk_base.data_sources.piggyback import PiggyBackDataSource
2738 return PiggyBackDataSource(self.hostname, None).has_persisted_agent_sections()
2740 def _primary_ip_address_family_of(self):
2741 rules = self._config_cache.host_extra_conf(self.hostname, primary_address_family)
2742 if rules:
2743 return rules[0]
2744 return "ipv4"
2746 def _get_host_labels(self):
2747 """Returns the effective set of host labels from all available sources
2749 1. Discovered labels
2750 2. Ruleset "Host labels"
2751 3. Explicit labels (via host/folder config)
2753 Last one wins.
2754 """
2755 labels = {}
2756 labels.update(self._discovered_labels_of_host())
2757 labels.update(self._config_cache.host_extra_conf_merged(self.hostname, host_label_rules))
2758 labels.update(host_labels.get(self.hostname, {}))
2759 return labels
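# Example of the precedence above (hypothetical labels): with discovered labels
# {"os": "linux"}, a "Host labels" rule yielding {"os": "windows", "env": "prod"}
# and explicit host labels {"env": "test"}, the effective labels are
# {"os": "windows", "env": "test"}.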
2761 def _get_host_label_sources(self):
2762 """Returns the effective set of host label keys with their source identifier instead of the value
2763 Order and merging logic is equal to _get_host_labels()"""
2764 labels = {}
2765 labels.update({k: "discovered" for k in self._discovered_labels_of_host().keys()})
2766 labels.update({k : "ruleset" \
2767 for k in self._config_cache.host_extra_conf_merged(self.hostname, host_label_rules)})
2768 labels.update({k: "explicit" for k in host_labels.get(self.hostname, {}).keys()})
2769 return labels
2771 def _discovered_labels_of_host(self):
2772 # type: () -> Dict
2773 return DiscoveredHostLabelsStore(self.hostname).load()
2777 # .--Configuration Cache-------------------------------------------------.
2778 # | ____ __ _ _ _ |
2779 # | / ___|___ _ __ / _(_) __ _ _ _ _ __ __ _| |_(_) ___ _ __ |
2780 # | | | / _ \| '_ \| |_| |/ _` | | | | '__/ _` | __| |/ _ \| '_ \ |
2781 # | | |__| (_) | | | | _| | (_| | |_| | | | (_| | |_| | (_) | | | | |
2782 # | \____\___/|_| |_|_| |_|\__, |\__,_|_| \__,_|\__|_|\___/|_| |_| |
2783 # | |___/ |
2784 # | ____ _ |
2785 # | / ___|__ _ ___| |__ ___ |
2786 # | | | / _` |/ __| '_ \ / _ \ |
2787 # | | |__| (_| | (__| | | | __/ |
2788 # | \____\__,_|\___|_| |_|\___| |
2789 # | |
2790 # +----------------------------------------------------------------------+
2793 class ConfigCache(object):
2794 def __init__(self):
2795 super(ConfigCache, self).__init__()
2796 self._initialize_caches()
2798 def initialize(self):
2799 self._initialize_caches()
2800 self._collect_hosttags()
2801 self._setup_clusters_nodes_cache()
2803 self._all_processed_hosts = all_active_hosts()
2804 self._all_configured_hosts = all_configured_hosts()
2805 self._initialize_host_lookup()
2807 def _initialize_caches(self):
2808 self.single_host_checks = cmk_base.config_cache.get_dict("single_host_checks")
2809 self.multi_host_checks = cmk_base.config_cache.get_list("multi_host_checks")
2810 self.check_table_cache = cmk_base.config_cache.get_dict("check_tables")
2812 self._cache_is_snmp_check = cmk_base.runtime_cache.get_dict("is_snmp_check")
2813 self._cache_is_tcp_check = cmk_base.runtime_cache.get_dict("is_tcp_check")
2814 self._cache_section_name_of = {}
2816 # Host lookup
2818 # Contains all hostnames which are currently relevant for this cache
2819 # Most of the time all_processed_hosts is similar to all_active_hosts.
2820 # However, in a multiprocessing environment all_processed_hosts may
2821 # contain only a reduced set of hosts, since each process handles a subset.
2822 self._all_processed_hosts = set()
2823 self._all_configured_hosts = set()
2825 # Reference hostname -> dirname including /
2826 self._host_paths = {}
2827 # Reference dirname -> hosts in this dir including subfolders
2828 self._folder_host_lookup = {}
2829 # All used folders used for various set intersection operations
2830 self._folder_path_set = set()
2832 # Host tags
2833 self._hosttags = {}
2834 self._hosttags_without_folder = {}
2836 # Reference hosttags_without_folder -> list of hosts
2837 # Provides a list of hosts with the same hosttags, excluding the folder
2838 self._hosts_grouped_by_tags = {}
2839 # Reference hostname -> tag group reference
2840 self._host_grouped_ref = {}
2842 # Autochecks cache
2843 self._autochecks_cache = {}
2845 # Cache for all_matching_host
2846 self._all_matching_hosts_match_cache = {}
2848 # Caches for host_extra_conf
2849 self._host_extra_conf_ruleset_cache = {}
2850 self._host_extra_conf_match_cache = {}
2852 # Caches for service_extra_conf
2853 self._service_extra_conf_ruleset_cache = {}
2854 self._service_extra_conf_host_matched_ruleset_cache = {}
2855 self._service_extra_conf_match_cache = {}
2857 # Caches for in_boolean_serviceconf_list
2858 self._in_boolean_service_conf_list_ruleset_cache = {}
2859 self._in_boolean_service_conf_list_match_cache = {}
2861 # Cache for in_binary_hostlist
2862 self._in_binary_hostlist_cache = {}
2864 # Caches for nodes and clusters
2865 self._clusters_of_cache = {}
2866 self._nodes_of_cache = {}
2868 # A factor which indicates how many hosts share the same host tag configuration (excluding folders):
2869 # len(all_processed_hosts) / len(different tag combinations)
2870 # It is used to determine the best rule evaluation method.
2871 self._all_processed_hosts_similarity = 1
2873 # Keep HostConfig instances created with the current configuration cache
2874 self._host_configs = {}
2876 def get_host_config(self, hostname):
2877 """Returns a HostConfig instance for the given host
2879 It lazily initializes the host config object and caches the objects during the lifetime
2880 of the ConfigCache."""
2881 host_config = self._host_configs.get(hostname)
2882 if host_config:
2883 return host_config
2885 host_config = self._host_configs[hostname] = HostConfig(self, hostname)
2886 return host_config
2888 def _collect_hosttags(self):
2889 for tagged_host in all_hosts + clusters.keys():
2890 parts = tagged_host.split("|")
2891 self._hosttags[parts[0]] = set(parts[1:])
2893 # TODO: check all call sites and remove this
2894 def tag_list_of_host(self, hostname):
2895 """Returns the list of all configured tags of a host. In case
2896 a host has no tags configured or is not known, it returns an
2897 empty list."""
2898 return self._hosttags.get(hostname, [])
2900 def tags_of_host(self, hostname):
2901 """Returns the dict of all configured tag groups and values of a host"""
2902 return host_tags.get(hostname, {})
2904 def tags_of_service(self, hostname, svc_desc):
2905 """Returns the dict of all configured tags of a service
2906 It takes all explicitly configured tag groups into account.
2907 """
2908 tags = {}
2909 for entry in self.service_extra_conf(hostname, svc_desc, service_tag_rules):
2910 tags.update(entry)
2911 return tags
2913 def labels_of_service(self, hostname, svc_desc):
2914 """Returns the effective set of service labels from all available sources
2916 1. Discovered labels
2917 2. Ruleset "Service labels"
2919 Last one wins.
2920 """
2921 labels = {}
2922 labels.update(self.service_extra_conf_merged(hostname, svc_desc, service_label_rules))
2923 return labels
2925 def label_sources_of_service(self, hostname, svc_desc):
2926 """Returns the effective set of service label keys with their source identifier instead of the value
2927 Order and merging logic is equal to labels_of_service()"""
2928 labels = {}
2929 labels.update({
2930 k: "ruleset"
2931 for k in self.service_extra_conf_merged(hostname, svc_desc, service_label_rules)
2932 })
2933 return labels
2935 def set_all_processed_hosts(self, all_processed_hosts):
2936 self._all_processed_hosts = set(all_processed_hosts)
2938 nodes_and_clusters = set()
2939 for hostname in self._all_processed_hosts:
2940 nodes_and_clusters.update(self._nodes_of_cache.get(hostname, []))
2941 nodes_and_clusters.update(self._clusters_of_cache.get(hostname, []))
2942 self._all_processed_hosts.update(nodes_and_clusters)
2944 # The folder host lookup includes a list of all -processed- hosts within a given
2945 # folder. Any update with set_all_processed_hosts() invalidates this cache, because
2946 # the scope of relevant hosts has changed. This is -good-, since the values in this
2947 # lookup are iterated one by one later on in all_matching_hosts.
2948 self._folder_host_lookup = {}
2950 self._adjust_processed_hosts_similarity()
2952 def _adjust_processed_hosts_similarity(self):
2953 """ This function computes the tag similarities between of the processed hosts
2954 The result is a similarity factor, which helps finding the most perfomant operation
2955 for the current hostset """
2956 used_groups = set()
2957 for hostname in self._all_processed_hosts:
2958 used_groups.add(self._host_grouped_ref[hostname])
2959 self._all_processed_hosts_similarity = (
2960 1.0 * len(self._all_processed_hosts) / len(used_groups))
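# E.g. (hypothetical numbers): 100 processed hosts sharing only 4 distinct tag
# combinations yield a similarity of 25.0. Values >= 3 (and no folder specific
# tag in the rule) let _match_hosts_by_tags() use the grouped matching via
# filter_hosts_with_same_tags_as_host() instead of checking each host individually.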
2962 def _initialize_host_lookup(self):
2963 for hostname in self._all_configured_hosts:
2964 dirname_of_host = os.path.dirname(host_paths[hostname])
2965 if dirname_of_host[-1] != "/":
2966 dirname_of_host += "/"
2967 self._host_paths[hostname] = dirname_of_host
2969 # Determine hosts within folders
2970 dirnames = [
2971 x[0][len(cmk.utils.paths.check_mk_config_dir):] + "/+"
2972 for x in os.walk(cmk.utils.paths.check_mk_config_dir)
2973 ]
2974 self._folder_path_set = set(dirnames)
2976 # Determine hosttags without folder tag
2977 for hostname in self._all_configured_hosts:
2978 tags_without_folder = set(self._hosttags[hostname])
2979 try:
2980 tags_without_folder.remove(self._host_paths[hostname])
2981 except KeyError:
2982 pass
2984 self._hosttags_without_folder[hostname] = tags_without_folder
2986 # Determine hosts with same tag setup (ignoring folder tag)
2987 for hostname in self._all_configured_hosts:
2988 group_ref = tuple(sorted(self._hosttags_without_folder[hostname]))
2989 self._hosts_grouped_by_tags.setdefault(group_ref, set()).add(hostname)
2990 self._host_grouped_ref[hostname] = group_ref
2992 def get_hosts_within_folder(self, folder_path, with_foreign_hosts):
2993 cache_id = with_foreign_hosts, folder_path
2994 if cache_id not in self._folder_host_lookup:
2995 hosts_in_folder = set()
2996 # Strip off "+"
2997 folder_path_tmp = folder_path[:-1]
2998 relevant_hosts = self._all_configured_hosts if with_foreign_hosts else self._all_processed_hosts
2999 for hostname in relevant_hosts:
3000 if self._host_paths[hostname].startswith(folder_path_tmp):
3001 hosts_in_folder.add(hostname)
3002 self._folder_host_lookup[cache_id] = hosts_in_folder
3003 return hosts_in_folder
3004 return self._folder_host_lookup[cache_id]
3006 def get_autochecks_of(self, hostname):
3007 try:
3008 return self._autochecks_cache[hostname]
3009 except KeyError:
3010 result = cmk_base.autochecks.read_autochecks_of(hostname)
3011 self._autochecks_cache[hostname] = result
3012 return result
3014 def section_name_of(self, section):
3015 try:
3016 return self._cache_section_name_of[section]
3017 except KeyError:
3018 section_name = cmk_base.check_utils.section_name_of(section)
3019 self._cache_section_name_of[section] = section_name
3020 return section_name
3022 def is_snmp_check(self, check_plugin_name):
3023 try:
3024 return self._cache_is_snmp_check[check_plugin_name]
3025 except KeyError:
3026 snmp_checks = cmk_base.runtime_cache.get_set("check_type_snmp")
3027 result = self.section_name_of(check_plugin_name) in snmp_checks
3028 self._cache_is_snmp_check[check_plugin_name] = result
3029 return result
3031 def is_tcp_check(self, check_plugin_name):
3032 try:
3033 return self._cache_is_tcp_check[check_plugin_name]
3034 except KeyError:
3035 tcp_checks = cmk_base.runtime_cache.get_set("check_type_tcp")
3036 result = self.section_name_of(check_plugin_name) in tcp_checks
3037 self._cache_is_tcp_check[check_plugin_name] = result
3038 return result
3040 def filter_hosts_with_same_tags_as_host(self, hostname, hosts):
3041 return self._hosts_grouped_by_tags[self._host_grouped_ref[hostname]].intersection(hosts)
3043 def all_matching_hosts(self, tags, hostlist, with_foreign_hosts):
3044 """Returns a set containing the names of hosts that match the given
3045 tags and hostlist conditions."""
3046 cache_id = tuple(tags), tuple(hostlist), with_foreign_hosts
3048 try:
3049 return self._all_matching_hosts_match_cache[cache_id]
3050 except KeyError:
3051 pass
3053 if with_foreign_hosts:
3054 valid_hosts = self._all_configured_hosts
3055 else:
3056 valid_hosts = self._all_processed_hosts
3058 tags_set = set(tags)
3059 tags_set_without_folder = tags_set
3060 rule_path_set = tags_set.intersection(self._folder_path_set)
3061 tags_set_without_folder = tags_set - rule_path_set
3063 if rule_path_set:
3064 # More than one dynamic folder in one rule is simply wrong..
3065 rule_path = list(rule_path_set)[0]
3066 else:
3067 rule_path = "/+"
3069 # Thin out the valid hosts further. If the rule is located in a folder
3070 # we only need the intersection of the folder's hosts and the previously determined valid_hosts
3071 valid_hosts = self.get_hosts_within_folder(rule_path,
3072 with_foreign_hosts).intersection(valid_hosts)
3074 # Contains matched hosts
3076 if tags_set_without_folder and hostlist == ALL_HOSTS:
3077 return self._match_hosts_by_tags(cache_id, valid_hosts, tags_set_without_folder)
3079 matching = set([])
3080 only_specific_hosts = not bool([x for x in hostlist if x[0] in ["@", "!", "~"]])
3082 # If no tags are specified and there are only specific hosts we already have the matches
3083 if not tags_set_without_folder and only_specific_hosts:
3084 matching = valid_hosts.intersection(hostlist)
3085 # If no tags are specified and the hostlist only includes @all (all hosts)
3086 elif not tags_set_without_folder and hostlist == ALL_HOSTS:
3087 matching = valid_hosts
3088 else:
3089 # If the rule has only exact host restrictions, we can thin out the list of hosts to check
3090 if only_specific_hosts:
3091 hosts_to_check = valid_hosts.intersection(set(hostlist))
3092 else:
3093 hosts_to_check = valid_hosts
3095 for hostname in hosts_to_check:
3096 # When no tag matching is requested, do not filter by tags. Accept all hosts
3097 # and filter only by hostlist
3098 if (not tags or
3099 hosttags_match_taglist(self._hosttags[hostname], tags_set_without_folder)):
3100 if in_extraconf_hostlist(hostlist, hostname):
3101 matching.add(hostname)
3103 self._all_matching_hosts_match_cache[cache_id] = matching
3104 return matching
3106 def _match_hosts_by_tags(self, cache_id, valid_hosts, tags_set_without_folder):
3107 matching = set([])
3108 has_specific_folder_tag = sum([x[0] == "/" for x in tags_set_without_folder])
3109 negative_match_tags = set()
3110 positive_match_tags = set()
3111 for tag in tags_set_without_folder:
3112 if tag[0] == "!":
3113 negative_match_tags.add(tag[1:])
3114 else:
3115 positive_match_tags.add(tag)
3117 if has_specific_folder_tag or self._all_processed_hosts_similarity < 3:
3118 # Without shared folders
3119 for hostname in valid_hosts:
3120 if not positive_match_tags - self._hosttags[hostname]:
3121 if not negative_match_tags.intersection(self._hosttags[hostname]):
3122 matching.add(hostname)
3124 self._all_matching_hosts_match_cache[cache_id] = matching
3125 return matching
3127 # With shared folders
3128 checked_hosts = set()
3129 for hostname in valid_hosts:
3130 if hostname in checked_hosts:
3131 continue
3133 hosts_with_same_tag = self.filter_hosts_with_same_tags_as_host(hostname, valid_hosts)
3134 checked_hosts.update(hosts_with_same_tag)
3136 if not positive_match_tags - self._hosttags[hostname]:
3137 if not negative_match_tags.intersection(self._hosttags[hostname]):
3138 matching.update(hosts_with_same_tag)
3140 self._all_matching_hosts_match_cache[cache_id] = matching
3141 return matching
3143 def host_extra_conf_merged(self, hostname, conf):
3144 rule_dict = {}
3145 for rule in self.host_extra_conf(hostname, conf):
3146 for key, value in rule.items():
3147 rule_dict.setdefault(key, value)
3148 return rule_dict
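# Note on the merge semantics above (hypothetical rule values): because
# setdefault() is used, the first matching rule wins per key, e.g.
# [{"timeout": 10}, {"timeout": 30, "retries": 2}] merges to
# {"timeout": 10, "retries": 2}.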
3150 def host_extra_conf(self, hostname, ruleset):
3151 with_foreign_hosts = hostname not in self._all_processed_hosts
3152 cache_id = id(ruleset), with_foreign_hosts
3153 try:
3154 return self._host_extra_conf_match_cache[cache_id][hostname]
3155 except KeyError:
3156 pass
3158 try:
3159 ruleset = self._host_extra_conf_ruleset_cache[cache_id]
3160 except KeyError:
3161 ruleset = self._convert_host_ruleset(ruleset, with_foreign_hosts)
3162 self._host_extra_conf_ruleset_cache[cache_id] = ruleset
3163 new_cache = {}
3164 for value, hostname_list in ruleset:
3165 for other_hostname in hostname_list:
3166 new_cache.setdefault(other_hostname, []).append(value)
3167 self._host_extra_conf_match_cache[cache_id] = new_cache
3169 if hostname not in self._host_extra_conf_match_cache[cache_id]:
3170 return []
3172 return self._host_extra_conf_match_cache[cache_id][hostname]
3174 def _convert_host_ruleset(self, ruleset, with_foreign_hosts):
3175 new_rules = []
3176 if len(ruleset) == 1 and ruleset[0] == "":
3177 console.warning('deprecated entry [ "" ] in host configuration list')
3179 for rule in ruleset:
3180 item, tags, hostlist, rule_options = parse_host_rule(rule)
3181 if rule_options.get("disabled"):
3182 continue
3184 # Directly compute set of all matching hosts here, this
3185 # will avoid recomputation later
3186 new_rules.append((item, self.all_matching_hosts(tags, hostlist, with_foreign_hosts)))
3188 return new_rules
3190 def service_extra_conf(self, hostname, service, ruleset):
3191 """Compute outcome of a service rule set that has an item."""
3192 # When the requested host is part of the local sites configuration,
3193 # then use only the sites hosts for processing the rules
3194 with_foreign_hosts = hostname not in self._all_processed_hosts
3195 cache_id = id(ruleset), with_foreign_hosts
3197 cached_ruleset = self._service_extra_conf_ruleset_cache.get(cache_id)
3198 if cached_ruleset is None:
3199 cached_ruleset = self._convert_service_ruleset(
3200 ruleset, with_foreign_hosts=with_foreign_hosts)
3201 self._service_extra_conf_ruleset_cache[cache_id] = cached_ruleset
3203 entries = []
3205 for value, hosts, service_matchers in cached_ruleset:
3206 if hostname not in hosts:
3207 continue
3209 descr_cache_id = service_matchers, service
3211 # 20% faster without exception handling
3212 # self._profile_log("descr cache id %r" % (descr_cache_id))
3213 match = self._service_extra_conf_match_cache.get(descr_cache_id)
3214 if match is None:
3215 match = _in_servicematcher_list(service_matchers, service)
3216 self._service_extra_conf_match_cache[descr_cache_id] = match
3218 if match:
3219 entries.append(value)
3221 return entries
3223 def service_extra_conf_merged(self, hostname, service, ruleset):
3224 rule_dict = {}
3225 for rule in self.service_extra_conf(hostname, service, ruleset):
3226 for key, value in rule.items():
3227 rule_dict.setdefault(key, value)
3228 return rule_dict
3230 def _convert_service_ruleset(self, ruleset, with_foreign_hosts):
3231 new_rules = []
3232 for rule in ruleset:
3233 rule, rule_options = get_rule_options(rule)
3234 if rule_options.get("disabled"):
3235 continue
3237 num_elements = len(rule)
3238 if num_elements == 3:
3239 item, hostlist, servlist = rule
3240 tags = []
3241 elif num_elements == 4:
3242 item, tags, hostlist, servlist = rule
3243 else:
3244 raise MKGeneralException("Invalid rule '%r' in service configuration "
3245 "list: must have 3 or 4 elements" % (rule,))
3247 # Directly compute set of all matching hosts here, this
3248 # will avoid recomputation later
3249 hosts = self.all_matching_hosts(tags, hostlist, with_foreign_hosts)
3251 # And now preprocess the configured patterns in the servlist
3252 new_rules.append((item, hosts, _convert_pattern_list(servlist)))
3254 return new_rules
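    # The two accepted classic layouts of a service rule (sketch, values are
    # made up):
    #
    #   ("VALUE", ["host1", "host2"], ["Service descr$"])             # 3 elements
    #   ("VALUE", ["tag1"], ["host1", "host2"], ["Service descr$"])   # 4 elements
    #
    # Each is converted to (value, matching_hosts, precompiled_service_matchers).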
3256    # Compute the outcome of a service rule set that just says yes/no
3257 def in_boolean_serviceconf_list(self, hostname, descr, ruleset):
3258 # When the requested host is part of the local sites configuration,
3259 # then use only the sites hosts for processing the rules
3260 with_foreign_hosts = hostname not in self._all_processed_hosts
3261 cache_id = id(ruleset), with_foreign_hosts
3262 try:
3263 ruleset = self._in_boolean_service_conf_list_ruleset_cache[cache_id]
3264 except KeyError:
3265 ruleset = self._convert_boolean_service_ruleset(ruleset, with_foreign_hosts)
3266 self._in_boolean_service_conf_list_ruleset_cache[cache_id] = ruleset
3268 for negate, hosts, service_matchers in ruleset:
3269 if hostname in hosts:
3270 cache_id = service_matchers, descr
3271 try:
3272 match = self._in_boolean_service_conf_list_match_cache[cache_id]
3273 except KeyError:
3274 match = _in_servicematcher_list(service_matchers, descr)
3275 self._in_boolean_service_conf_list_match_cache[cache_id] = match
3277 if match:
3278 return not negate
3279 return False # no match. Do not ignore
3281 def _convert_boolean_service_ruleset(self, ruleset, with_foreign_hosts):
3282 new_rules = []
3283 for rule in ruleset:
3284 entry, rule_options = get_rule_options(rule)
3285 if rule_options.get("disabled"):
3286 continue
3288 if entry[0] == NEGATE: # this entry is logically negated
3289 negate = True
3290 entry = entry[1:]
3291 else:
3292 negate = False
3294 if len(entry) == 2:
3295 hostlist, servlist = entry
3296 tags = []
3297 elif len(entry) == 3:
3298 tags, hostlist, servlist = entry
3299 else:
3300 raise MKGeneralException("Invalid entry '%r' in configuration: "
3301 "must have 2 or 3 elements" % (entry,))
3303            # Directly compute the set of all matching hosts here;
3304            # this avoids recomputation later
3305 hosts = self.all_matching_hosts(tags, hostlist, with_foreign_hosts)
3306 new_rules.append((negate, hosts, _convert_pattern_list(servlist)))
3308 return new_rules
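    # Sketch of the accepted entry layouts (made-up example):
    #
    #   (["web01"], ["Disk IO"])               # hosts, service patterns
    #   (["lnx"], ["web01"], ["Disk IO"])      # tags, hosts, service patterns
    #   (NEGATE, ["web01"], ["Disk IO"])       # negated entry
    #
    # Each entry becomes (negate, matching_hosts, precompiled_service_matchers).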
3310 def _setup_clusters_nodes_cache(self):
3311 for cluster, hosts in clusters.items():
3312 clustername = cluster.split('|', 1)[0]
3313 for name in hosts:
3314 self._clusters_of_cache.setdefault(name, []).append(clustername)
3315 self._nodes_of_cache[clustername] = hosts
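    # Illustration with a made-up configuration: an entry like
    #
    #   clusters = {"mycluster|tcp|prod": ["node1", "node2"]}
    #
    # fills the caches as
    #
    #   _clusters_of_cache = {"node1": ["mycluster"], "node2": ["mycluster"]}
    #   _nodes_of_cache    = {"mycluster": ["node1", "node2"]}
    #
    # i.e. the tag part after the first '|' is stripped from the cluster name.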
3317    # Returns the names of the clusters the given host is a node of.
3318 def clusters_of(self, hostname):
3319 return self._clusters_of_cache.get(hostname, [])
3321 # TODO: cleanup none
3322 # Returns the nodes of a cluster. Returns None if no match
3323 def nodes_of(self, hostname):
3324 return self._nodes_of_cache.get(hostname)
3326    # Determine whether a service (found on a physical host) is a clustered
3327    # service and - if yes - return the cluster host of the service. If
3328    # not, return the hostname of the physical host.
3329 def host_of_clustered_service(self, hostname, servicedesc, part_of_clusters=None):
3330 if part_of_clusters:
3331 the_clusters = part_of_clusters
3332 else:
3333 the_clusters = self.clusters_of(hostname)
3335 if not the_clusters:
3336 return hostname
3338 cluster_mapping = self.service_extra_conf(hostname, servicedesc, clustered_services_mapping)
3339 for cluster in cluster_mapping:
3340 # Check if the host is in this cluster
3341 if cluster in the_clusters:
3342 return cluster
3344 # 1. New style: explicitly assigned services
3345 for cluster, conf in clustered_services_of.iteritems():
3346            nodes = self.nodes_of(cluster)
3347 if not nodes:
3348 raise MKGeneralException(
3349 "Invalid entry clustered_services_of['%s']: %s is not a cluster." % (cluster,
3350 cluster))
3351 if hostname in nodes and \
3352 self.in_boolean_serviceconf_list(hostname, servicedesc, conf):
3353 return cluster
3355        # 2. Old style: clustered_services assumes that each host belongs to
3356        #    exactly one cluster
3357 if self.in_boolean_serviceconf_list(hostname, servicedesc, clustered_services):
3358 return the_clusters[0]
3360 return hostname
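    # Summary of the resolution order implemented above:
    #   1. clustered_services_mapping rules that point to one of the host's clusters
    #   2. clustered_services_of entries of a cluster the host is a node of
    #   3. the generic clustered_services ruleset (first cluster of the host wins)
    #   4. otherwise the physical host itself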
3362 def in_binary_hostlist(self, hostname, conf):
3363 cache = self._in_binary_hostlist_cache
3365 cache_id = id(conf), hostname
3366 try:
3367 return cache[cache_id]
3368 except KeyError:
3369 pass
3371        # If conf is just a list of strings, take it as a list of host names
3372 if conf and isinstance(conf[0], str):
3373 result = hostname in conf
3374 cache[cache_id] = result
3375 else:
3376 for entry in conf:
3377 actual_host_tags = self.tag_list_of_host(hostname)
3378 entry, rule_options = get_rule_options(entry)
3379 if rule_options.get("disabled"):
3380 continue
3382 try:
3383 # Negation via 'NEGATE'
3384 if entry[0] == NEGATE:
3385 entry = entry[1:]
3386 negate = True
3387 else:
3388 negate = False
3389                    # entry should be a 1-tuple or 2-tuple whose elements are
3390                    # lists of strings. If the user forgot the trailing comma of
3391                    # a 1-tuple, the entry is the host list itself.
3392 if isinstance(entry, list):
3393 hostlist = entry
3394 tags = []
3395 else:
3396 if len(entry) == 1: # 1-Tuple with list of hosts
3397 hostlist = entry[0]
3398 tags = []
3399 else:
3400 tags, hostlist = entry
3402 if hosttags_match_taglist(actual_host_tags, tags) and \
3403 in_extraconf_hostlist(hostlist, hostname):
3404 cache[cache_id] = not negate
3405 break
3406 except:
3407 # TODO: Fix this too generic catching (+ bad error message)
3408 raise MKGeneralException("Invalid entry '%r' in host configuration list: "
3409 "must be tuple with 1 or 2 entries" % (entry,))
3410 else:
3411 cache[cache_id] = False
3413 return cache[cache_id]
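    # Accepted shapes of 'conf' (sketch with made-up host names):
    #
    #   ["host1", "host2"]                    # plain list of host names
    #   [(["host1", "host2"],)]               # 1-tuples: (host list,)
    #   [(["tag1"], ["host1", "host2"])]      # 2-tuples: (tags, host list)
    #   [(NEGATE, ["host1", "host2"])]        # NEGATE prefix inverts the entry
    #
    # Tuple entries may additionally carry a trailing rule options dict, which
    # is stripped by get_rule_options() above.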
3416 def get_config_cache():
3417 config_cache = cmk_base.config_cache.get_dict("config_cache")
3418 if not config_cache:
3419 config_cache["cache"] = ConfigCache()
3420 return config_cache["cache"]
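# Typical usage sketch (some_ruleset stands for any classic list style ruleset
# variable from the configuration):
#
#   config_cache = config.get_config_cache()
#   values = config_cache.host_extra_conf("myhost", some_ruleset)
#
# The single ConfigCache instance is stored in cmk_base.config_cache under the
# key "config_cache", so repeated calls return the same object.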