Cleanup all direct config.service_extra_conf calls
[check_mk.git] / cmk_base / config.py
1 #!/usr/bin/env python
2 # -*- encoding: utf-8; py-indent-offset: 4 -*-
3 # +------------------------------------------------------------------+
4 # | ____ _ _ __ __ _ __ |
5 # | / ___| |__ ___ ___| | __ | \/ | |/ / |
6 # | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
7 # | | |___| | | | __/ (__| < | | | | . \ |
8 # | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
9 # | |
10 # | Copyright Mathias Kettner 2014 mk@mathias-kettner.de |
11 # +------------------------------------------------------------------+
13 # This file is part of Check_MK.
14 # The official homepage is at http://mathias-kettner.de/check_mk.
16 # check_mk is free software; you can redistribute it and/or modify it
17 # under the terms of the GNU General Public License as published by
18 # the Free Software Foundation in version 2. check_mk is distributed
19 # in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
20 # out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
21 # PARTICULAR PURPOSE. See the GNU General Public License for more de-
22 # tails. You should have received a copy of the GNU General Public
23 # License along with GNU Make; see the file COPYING. If not, write
24 # to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
25 # Boston, MA 02110-1301 USA.
27 from collections import OrderedDict
28 import ast
29 import copy
30 import inspect
31 import marshal
32 import numbers
33 import os
34 import py_compile
35 import struct
36 import sys
37 from typing import Any, Callable, Dict, List, Tuple, Union, Optional # pylint: disable=unused-import
39 import six
41 import cmk.utils.debug
42 import cmk.utils.paths
43 from cmk.utils.regex import regex, is_regex
44 import cmk.utils.translations
45 import cmk.utils.rulesets.tuple_rulesets
46 import cmk.utils.store as store
47 import cmk.utils
48 from cmk.utils.exceptions import MKGeneralException, MKTerminate
50 import cmk_base
51 import cmk_base.console as console
52 import cmk_base.default_config as default_config
53 import cmk_base.check_utils
54 import cmk_base.utils
55 import cmk_base.check_api_utils as check_api_utils
56 import cmk_base.cleanup
57 import cmk_base.piggyback as piggyback
58 from cmk_base.discovered_labels import DiscoveredHostLabelsStore
60 # TODO: Prefix helper functions with "_".
62 # This is mainly needed for pylint to detect all available
63 # configuration options during static analysis. The defaults
64 # are loaded later with load_default_config() again.
65 from cmk_base.default_config import * # pylint: disable=wildcard-import,unused-wildcard-import
67 service_service_levels = [] # type: ignore
68 host_service_levels = [] # type: ignore
71 class TimespecificParamList(list):
72 pass
75 def get_variable_names():
76 """Provides the list of all known configuration variables."""
77 return [k for k in default_config.__dict__ if k[0] != "_"]
80 def get_default_config():
81 """Provides a dictionary containing the Check_MK default configuration"""
82 cfg = {}
83 for key in get_variable_names():
84 value = getattr(default_config, key)
86 if isinstance(value, (dict, list)):
87 value = copy.deepcopy(value)
89 cfg[key] = value
90 return cfg
93 def load_default_config():
94 globals().update(get_default_config())
97 def register(name, default_value):
98 """Register a new configuration variable within Check_MK base."""
99 setattr(default_config, name, default_value)
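# Illustrative usage (not part of the original file; the variable name below is
# hypothetical): a check plugin or local extension can register its own
# configuration variable so that it is picked up by get_variable_names() and
# get_default_config().
#
#     register("my_custom_inventory_setting", [])
#     assert "my_custom_inventory_setting" in get_variable_names()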
102 def _add_check_variables_to_default_config():
103 """Add configuration variables registered by checks to config module"""
104 default_config.__dict__.update(get_check_variable_defaults())
107 def _clear_check_variables_from_default_config(variable_names):
108 """Remove previously registered check variables from the config module"""
109 for varname in variable_names:
110 try:
111 delattr(default_config, varname)
112 except AttributeError:
113 pass
116 # Load user configured values of check related configuration variables
117 # into the check module to make them available during checking.
119 # In the same step we remove the check related configuration settings from the
120 # config module because they are not needed there anymore.
122 # And also remove it from the default config (in case it was present)
123 def set_check_variables_for_checks():
124 global_dict = globals()
125 cvn = check_variable_names()
127 check_variables = {}
128 for varname in cvn:
129 check_variables[varname] = global_dict.pop(varname)
131 set_check_variables(check_variables)
132 _clear_check_variables_from_default_config(cvn)
136 # .--Read Config---------------------------------------------------------.
137 # | ____ _ ____ __ _ |
138 # | | _ \ ___ __ _ __| | / ___|___ _ __ / _(_) __ _ |
139 # | | |_) / _ \/ _` |/ _` | | | / _ \| '_ \| |_| |/ _` | |
140 # | | _ < __/ (_| | (_| | | |__| (_) | | | | _| | (_| | |
141 # | |_| \_\___|\__,_|\__,_| \____\___/|_| |_|_| |_|\__, | |
142 # | |___/ |
143 # +----------------------------------------------------------------------+
144 # | Code for reading the configuration files. |
145 # '----------------------------------------------------------------------'
148 def load(with_conf_d=True, validate_hosts=True, exclude_parents_mk=False):
149 _initialize_config()
151 vars_before_config = all_nonfunction_vars()
153 _load_config(with_conf_d, exclude_parents_mk)
154 _transform_mgmt_config_vars_from_140_to_150()
155 _initialize_derived_config_variables()
157 _perform_post_config_loading_actions()
159 if validate_hosts:
160 _verify_non_duplicate_hosts()
162 # Such validation only makes sense when all checks have been loaded
163 if all_checks_loaded():
164 verify_non_invalid_variables(vars_before_config)
165 _verify_no_deprecated_check_rulesets()
167 verify_snmp_communities_type()
170 def load_packed_config():
171 """Load the configuration for the CMK helpers of CMC
173 These files are written by PackedConfig().
175 Should have a result similar to the load() above. With the exception that the
176 check helpers would only need check related config variables.
178 The validations which are performed during load() also don't need to be performed.
179 """
180 PackedConfig().load()
183 def _initialize_config():
184 _add_check_variables_to_default_config()
185 load_default_config()
188 def _perform_post_config_loading_actions():
189 """These tasks must be performed after loading the Check_MK base configuration"""
190 # First cleanup things (needed for e.g. reloading the config)
191 cmk_base.config_cache.clear_all()
193 get_config_cache().initialize()
195 # If the checks have not been loaded yet, the current mode apparently
196 # does not work with the checks. In that case, also don't load the
197 # static checks into the configuration.
198 if any_check_loaded():
199 add_wato_static_checks_to_checks()
200 initialize_check_caches()
201 set_check_variables_for_checks()
204 def _load_config(with_conf_d, exclude_parents_mk):
205 helper_vars = {
206 "FOLDER_PATH": None,
209 global_dict = globals()
210 global_dict.update(helper_vars)
212 for _f in _get_config_file_paths(with_conf_d):
213 # During parent scan mode we must not read in the old version of parents.mk!
214 if exclude_parents_mk and _f.endswith("/parents.mk"):
215 continue
217 try:
218 _hosts_before = set(all_hosts)
219 _clusters_before = set(clusters.keys())
221 # Make the config path available as a global variable to
222 # be used within the configuration file
223 if _f.startswith(cmk.utils.paths.check_mk_config_dir + "/"):
224 _file_path = _f[len(cmk.utils.paths.check_mk_config_dir) + 1:]
225 global_dict.update({
226 "FOLDER_PATH": os.path.dirname(_file_path),
228 else:
229 global_dict.update({
230 "FOLDER_PATH": None,
233 execfile(_f, global_dict, global_dict)
235 _new_hosts = set(all_hosts).difference(_hosts_before)
236 _new_clusters = set(clusters.keys()).difference(_clusters_before)
238 set_folder_paths(_new_hosts.union(_new_clusters), _f)
239 except Exception as e:
240 if cmk.utils.debug.enabled():
241 raise
242 elif sys.stderr.isatty():
243 console.error("Cannot read in configuration file %s: %s\n", _f, e)
244 sys.exit(1)
246 # Cleanup global helper vars
247 for helper_var in helper_vars:
248 del global_dict[helper_var]
251 def _transform_mgmt_config_vars_from_140_to_150():
252 #FIXME We have to transform some configuration variables from host attributes
253 # to cmk_base configuration variables because during the migration step from
254 # 1.4.0 to 1.5.0 some config variables are not known in cmk_base. These variables
255 # are 'management_protocol' and 'management_snmp_community'.
256 # Clean this up one day!
257 for hostname, attributes in host_attributes.iteritems():
258 for name, var in [
259 ('management_protocol', management_protocol),
260 ('management_snmp_community', management_snmp_credentials),
261 ]:
262 if attributes.get(name):
263 var.setdefault(hostname, attributes[name])
266 # Create list of all files to be included during configuration loading
267 def _get_config_file_paths(with_conf_d):
268 if with_conf_d:
269 list_of_files = sorted(
270 reduce(lambda a, b: a + b,
271 [["%s/%s" % (d, f)
272 for f in fs
273 if f.endswith(".mk")]
274 for d, _unused_sb, fs in os.walk(cmk.utils.paths.check_mk_config_dir)], []),
275 cmp=cmk.utils.cmp_config_paths)
276 list_of_files = [cmk.utils.paths.main_config_file] + list_of_files
277 else:
278 list_of_files = [cmk.utils.paths.main_config_file]
280 for path in [cmk.utils.paths.final_config_file, cmk.utils.paths.local_config_file]:
281 if os.path.exists(path):
282 list_of_files.append(path)
284 return list_of_files
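# Illustrative sketch of the resulting load order (paths assume a standard OMD
# site layout and are hypothetical):
#
#     [
#         ".../etc/check_mk/main.mk",               # always first
#         ".../etc/check_mk/conf.d/wato/rules.mk",  # conf.d files, sorted
#         ".../etc/check_mk/final.mk",              # appended if present
#         ".../etc/check_mk/local.mk",              # appended if present
#     ]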
287 def _initialize_derived_config_variables():
288 global service_service_levels, host_service_levels
289 service_service_levels = extra_service_conf.get("_ec_sl", [])
290 host_service_levels = extra_host_conf.get("_ec_sl", [])
293 def get_derived_config_variable_names():
294 """These variables are computed from other configuration variables and not configured directly.
296 The origin variable (extra_service_conf) should not be exported to the helper config. Only
297 the service levels are needed."""
298 return set(["service_service_levels", "host_service_levels"])
301 def _verify_non_duplicate_hosts():
302 duplicates = duplicate_hosts()
303 if duplicates:
304 # TODO: Raise an exception
305 console.error("Error in configuration: duplicate hosts: %s\n", ", ".join(duplicates))
306 sys.exit(3)
309 # Add WATO-configured explicit checks to the (possibly empty) checks
310 # statically defined in the 'checks' variable.
311 def add_wato_static_checks_to_checks():
312 global checks
314 static = []
315 for entries in static_checks.values():
316 for entry in entries:
317 entry, rule_options = get_rule_options(entry)
318 if rule_options.get("disabled"):
319 continue
321 # Parameters are optional
322 if len(entry[0]) == 2:
323 checktype, item = entry[0]
324 params = None
325 else:
326 checktype, item, params = entry[0]
327 if len(entry) == 3:
328 taglist, hostlist = entry[1:3]
329 else:
330 hostlist = entry[1]
331 taglist = []
333 # Do not process manual checks that are related to non-existing or
334 # not-loaded check files
335 try:
336 check_plugin_info = check_info[checktype]
337 except KeyError:
338 continue
340 # Make sure, that for dictionary based checks
341 # at least those keys defined in the factory
342 # settings are present in the parameters
343 if isinstance(params, dict):
344 def_levels_varname = check_plugin_info.get("default_levels_variable")
345 if def_levels_varname:
346 for key, value in factory_settings.get(def_levels_varname, {}).items():
347 if key not in params:
348 params[key] = value
350 static.append((taglist, hostlist, checktype, item, params))
352 # Note: We need to reverse the order of the static_checks. This is because
353 # users assume that earlier rules have precedence over later ones. For static
354 # checks that is important if there are two rules for a host with the same
355 # combination of check type and item. When the variable 'checks' is evaluated,
356 # *later* rules have precedence. This is not consistent with the rest, but a
357 # result of this "historic implementation".
358 static.reverse()
360 # Now prepend them to the checks. That gives the 'checks' variable
361 # precedence over WATO.
362 checks = static + checks
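# Illustrative sketch of a static_checks entry as written by WATO (values are
# made up): each rule carries a (checktype, item[, params]) tuple plus the
# usual tag/host conditions. The code above converts it into the classic
# 'checks' tuple format.
#
#     static_checks = {
#         "filesystem": [
#             (("df", "/var", {"levels": (80.0, 90.0)}), [], ALL_HOSTS),
#         ],
#     }
#     # becomes roughly: checks = [([], ALL_HOSTS, "df", "/var", {...})] + checks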
365 def initialize_check_caches():
366 single_host_checks = cmk_base.config_cache.get_dict("single_host_checks")
367 multi_host_checks = cmk_base.config_cache.get_list("multi_host_checks")
369 for entry in checks:
370 if len(entry) == 4 and isinstance(entry[0], str):
371 single_host_checks.setdefault(entry[0], []).append(entry)
372 else:
373 multi_host_checks.append(entry)
376 def set_folder_paths(new_hosts, filename):
377 if not filename.startswith(cmk.utils.paths.check_mk_config_dir):
378 return
380 path = filename[len(cmk.utils.paths.check_mk_config_dir):]
382 for hostname in strip_tags(new_hosts):
383 host_paths[hostname] = path
386 def verify_non_invalid_variables(vars_before_config):
387 # Check for invalid configuration variables
388 vars_after_config = all_nonfunction_vars()
389 ignored_variables = set([
390 'vars_before_config', 'parts', 'seen_hostnames', 'taggedhost', 'hostname',
391 'service_service_levels', 'host_service_levels'
392 ])
394 found_invalid = 0
395 for name in vars_after_config:
396 if name not in ignored_variables and name not in vars_before_config:
397 console.error("Invalid configuration variable '%s'\n", name)
398 found_invalid += 1
400 if found_invalid:
401 console.error("--> Found %d invalid variables\n" % found_invalid)
402 console.error("If you use own helper variables, please prefix them with _.\n")
403 sys.exit(1)
406 def verify_snmp_communities_type():
407 # Special handling for certain deprecated variables
408 if isinstance(snmp_communities, dict):
409 console.error("ERROR: snmp_communities cannot be a dict any more.\n")
410 sys.exit(1)
413 def _verify_no_deprecated_check_rulesets():
414 deprecated_rulesets = [
415 ("services", "inventory_services"),
416 ("domino_tasks", "inv_domino_tasks"),
417 ("ps", "inventory_processes"),
418 ("logwatch", "logwatch_patterns"),
420 for check_plugin_name, varname in deprecated_rulesets:
421 check_context = get_check_context(check_plugin_name)
422 if check_context[varname]:
423 console.warning(
424 "Found rules for deprecated ruleset %r. These rules are not applied "
425 "anymore. In case you still need them, you need to migrate them by hand. "
426 "Otherwise you can remove them from your configuration." % varname)
429 def all_nonfunction_vars():
430 return set(
431 [name for name, value in globals().items() if name[0] != '_' and not callable(value)])
434 class PackedConfig(object):
435 """The precompiled host checks and the CMC Check_MK helpers use a
436 "precompiled" part of the Check_MK configuration during runtime.
438 a) They must not use the live config from etc/check_mk during
439 startup. They are only allowed to load the config activated by
440 the user.
442 b) They must not load the whole Check_MK config. Because they only
443 need the options needed for checking
444 """
446 # These variables are part of the Check_MK configuration, but are not needed
447 # by the Check_MK keepalive mode, so exclude them from the packed config
448 _skipped_config_variable_names = [
449 "define_contactgroups",
450 "define_hostgroups",
451 "define_servicegroups",
452 "service_contactgroups",
453 "host_contactgroups",
454 "service_groups",
455 "host_groups",
456 "contacts",
457 "timeperiods",
458 "extra_service_conf",
459 "extra_nagios_conf",
462 def __init__(self):
463 super(PackedConfig, self).__init__()
464 self._path = os.path.join(cmk.utils.paths.var_dir, "base", "precompiled_check_config.mk")
466 def save(self):
467 self._write(self._pack())
469 def _pack(self):
470 helper_config = ("#!/usr/bin/env python\n"
471 "# encoding: utf-8\n"
472 "# Created by Check_MK. Dump of the currently active configuration\n\n")
474 # These functions' purpose is to filter out hosts which are monitored on different sites
475 active_hosts = all_active_hosts()
476 active_clusters = all_active_clusters()
478 def filter_all_hosts(all_hosts_orig):
479 all_hosts_red = []
480 for host_entry in all_hosts_orig:
481 hostname = host_entry.split("|", 1)[0]
482 if hostname in active_hosts:
483 all_hosts_red.append(host_entry)
484 return all_hosts_red
486 def filter_clusters(clusters_orig):
487 clusters_red = {}
488 for cluster_entry, cluster_nodes in clusters_orig.items():
489 clustername = cluster_entry.split("|", 1)[0]
490 if clustername in active_clusters:
491 clusters_red[cluster_entry] = cluster_nodes
492 return clusters_red
494 def filter_hostname_in_dict(values):
495 values_red = {}
496 for hostname, attributes in values.items():
497 if hostname in active_hosts:
498 values_red[hostname] = attributes
499 return values_red
501 filter_var_functions = {
502 "all_hosts": filter_all_hosts,
503 "clusters": filter_clusters,
504 "host_attributes": filter_hostname_in_dict,
505 "ipaddresses": filter_hostname_in_dict,
506 "ipv6addresses": filter_hostname_in_dict,
507 "explicit_snmp_communities": filter_hostname_in_dict,
508 "hosttags": filter_hostname_in_dict
512 # Add modified Check_MK base settings
515 variable_defaults = get_default_config()
516 derived_config_variable_names = get_derived_config_variable_names()
518 global_variables = globals()
520 for varname in get_variable_names() + list(derived_config_variable_names):
521 if varname in self._skipped_config_variable_names:
522 continue
524 val = global_variables[varname]
526 if varname not in derived_config_variable_names and val == variable_defaults[varname]:
527 continue
529 if not self._packable(varname, val):
530 continue
532 if varname in filter_var_functions:
533 val = filter_var_functions[varname](val)
535 helper_config += "\n%s = %r\n" % (varname, val)
538 # Add modified check specific Check_MK base settings
541 check_variable_defaults = get_check_variable_defaults()
543 for varname, val in get_check_variables().items():
544 if val == check_variable_defaults[varname]:
545 continue
547 if not self._packable(varname, val):
548 continue
550 helper_config += "\n%s = %r\n" % (varname, val)
552 return helper_config
554 def _packable(self, varname, val):
555 """Checks whether or not a variable can be written to the config.mk
556 and read again from it."""
557 if isinstance(val, six.string_types + (int, bool)) or not val:
558 return True
560 try:
561 eval(repr(val))
562 return True
563 except:
564 return False
566 def _write(self, helper_config):
567 store.makedirs(os.path.dirname(self._path))
569 store.save_file(self._path + ".orig", helper_config + "\n")
571 code = compile(helper_config, '<string>', 'exec')
572 with open(self._path + ".compiled", "w") as compiled_file:
573 marshal.dump(code, compiled_file)
575 os.rename(self._path + ".compiled", self._path)
577 def load(self):
578 _initialize_config()
579 exec (marshal.load(open(self._path)), globals())
580 _perform_post_config_loading_actions()
584 # .--Host tags-----------------------------------------------------------.
585 # | _ _ _ _ |
586 # | | | | | ___ ___| |_ | |_ __ _ __ _ ___ |
587 # | | |_| |/ _ \/ __| __| | __/ _` |/ _` / __| |
588 # | | _ | (_) \__ \ |_ | || (_| | (_| \__ \ |
589 # | |_| |_|\___/|___/\__| \__\__,_|\__, |___/ |
590 # | |___/ |
591 # +----------------------------------------------------------------------+
592 # | Helper functions for dealing with host tags |
593 # '----------------------------------------------------------------------'
596 def strip_tags(tagged_hostlist):
597 cache = cmk_base.config_cache.get_dict("strip_tags")
599 cache_id = tuple(tagged_hostlist)
600 try:
601 return cache[cache_id]
602 except KeyError:
603 result = [h.split('|', 1)[0] for h in tagged_hostlist]
604 cache[cache_id] = result
605 return result
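# Illustrative example (host names are made up): entries in all_hosts/clusters
# carry their tags appended with "|", strip_tags() reduces them to plain names.
#
#     strip_tags(["web01|lan|prod|tcp", "db01|lan|snmp"])  # -> ["web01", "db01"]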
609 # .--HostCollections-----------------------------------------------------.
610 # | _ _ _ ____ _ _ _ _ |
611 # || | | | ___ ___| |_ / ___|___ | | | ___ ___| |_(_) ___ _ __ ___ |
612 # || |_| |/ _ \/ __| __| | / _ \| | |/ _ \/ __| __| |/ _ \| '_ \/ __| |
613 # || _ | (_) \__ \ |_| |__| (_) | | | __/ (__| |_| | (_) | | | \__ \ |
614 # ||_| |_|\___/|___/\__|\____\___/|_|_|\___|\___|\__|_|\___/|_| |_|___/ |
615 # | |
616 # +----------------------------------------------------------------------+
617 # | |
618 # '----------------------------------------------------------------------'
621 # Returns a set of all active hosts
622 def all_active_hosts():
623 cache = cmk_base.config_cache.get_set("all_active_hosts")
624 if not cache.is_populated():
625 cache.update(all_active_realhosts(), all_active_clusters())
626 cache.set_populated()
627 return cache
630 # Returns a set of all host names to be handled by this site
631 # hosts of other sites or disabled hosts are excluded
632 def all_active_realhosts():
633 active_realhosts = cmk_base.config_cache.get_set("active_realhosts")
635 if not active_realhosts.is_populated():
636 active_realhosts.update(filter_active_hosts(all_configured_realhosts()))
637 active_realhosts.set_populated()
639 return active_realhosts
642 # Returns a set of all cluster host names to be handled by
643 # this site; hosts of other sites or disabled hosts are excluded
644 def all_active_clusters():
645 active_clusters = cmk_base.config_cache.get_set("active_clusters")
647 if not active_clusters.is_populated():
648 active_clusters.update(filter_active_hosts(all_configured_clusters()))
649 active_clusters.set_populated()
651 return active_clusters
654 # Returns a set of all hosts, regardless if currently
655 # disabled or monitored on a remote site.
656 def all_configured_hosts():
657 cache = cmk_base.config_cache.get_set("all_configured_hosts")
658 if not cache.is_populated():
659 cache.update(all_configured_realhosts(), all_configured_clusters())
660 cache.set_populated()
661 return cache
664 # Returns a set of all host names, regardless if currently
665 # disabled or monitored on a remote site. Does not return
666 # cluster hosts.
667 def all_configured_realhosts():
668 cache = cmk_base.config_cache.get_set("all_configured_realhosts")
669 if not cache.is_populated():
670 cache.update(strip_tags(all_hosts))
671 cache.set_populated()
672 return cache
675 # Returns a set of all cluster names, regardless if currently
676 # disabled or monitored on a remote site. Does not return
677 # normal hosts.
678 def all_configured_clusters():
679 cache = cmk_base.config_cache.get_set("all_configured_clusters")
680 if not cache.is_populated():
681 cache.update(strip_tags(clusters.keys()))
682 cache.set_populated()
683 return cache
686 # This function should only be used during duplicate host check! It has to work like
687 # all_active_hosts() but with the difference that duplicates are not removed.
688 def all_active_hosts_with_duplicates():
689 # Only available with CEE
690 if "shadow_hosts" in globals():
691 shadow_host_entries = shadow_hosts.keys()
692 else:
693 shadow_host_entries = []
695 return filter_active_hosts(strip_tags(all_hosts) \
696 + strip_tags(clusters.keys()) \
697 + strip_tags(shadow_host_entries), keep_duplicates=True)
700 # Returns a set of active hosts for this site
701 def filter_active_hosts(hostlist, keep_offline_hosts=False, keep_duplicates=False):
702 if only_hosts is None and distributed_wato_site is None:
703 active_hosts = hostlist
705 elif only_hosts is None:
706 active_hosts = [
707 hostname for hostname in hostlist
708 if host_is_member_of_site(hostname, distributed_wato_site)
711 elif distributed_wato_site is None:
712 if keep_offline_hosts:
713 active_hosts = hostlist
714 else:
715 active_hosts = [
716 hostname for hostname in hostlist if in_binary_hostlist(hostname, only_hosts)
719 else:
720 active_hosts = [
721 hostname for hostname in hostlist
722 if (keep_offline_hosts or in_binary_hostlist(hostname, only_hosts)) and
723 host_is_member_of_site(hostname, distributed_wato_site)
726 if keep_duplicates:
727 return active_hosts
729 return set(active_hosts)
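# Illustrative behaviour (hypothetical setup): with only_hosts configured and
# no distributed_wato_site set, only hosts matching the only_hosts ruleset
# survive; with keep_offline_hosts=True the ruleset is ignored.
#
#     filter_active_hosts(["web01", "web02"])                           # -> set of matching hosts
#     filter_active_hosts(["web01", "web02"], keep_offline_hosts=True)  # -> {"web01", "web02"}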
732 def duplicate_hosts():
733 seen_hostnames = set([])
734 duplicates = set([])
736 for hostname in all_active_hosts_with_duplicates():
737 if hostname in seen_hostnames:
738 duplicates.add(hostname)
739 else:
740 seen_hostnames.add(hostname)
742 return sorted(list(duplicates))
745 # Returns a list of all hosts which are associated with this site,
746 # but have been removed by the "only_hosts" rule. Normally these
747 # are the hosts which have the tag "offline".
749 # This is not optimized for performance, so use in specific situations.
750 def all_offline_hosts():
751 hostlist = filter_active_hosts(
752 all_configured_realhosts().union(all_configured_clusters()), keep_offline_hosts=True)
754 return [hostname for hostname in hostlist if not in_binary_hostlist(hostname, only_hosts)]
757 def all_configured_offline_hosts():
758 hostlist = all_configured_realhosts().union(all_configured_clusters())
760 return set([hostname for hostname in hostlist if not in_binary_hostlist(hostname, only_hosts)])
764 # .--Hosts---------------------------------------------------------------.
765 # | _ _ _ |
766 # | | | | | ___ ___| |_ ___ |
767 # | | |_| |/ _ \/ __| __/ __| |
768 # | | _ | (_) \__ \ |_\__ \ |
769 # | |_| |_|\___/|___/\__|___/ |
770 # | |
771 # +----------------------------------------------------------------------+
772 # | Helper functions for dealing with hosts. |
773 # '----------------------------------------------------------------------'
776 def host_is_member_of_site(hostname, site):
777 for tag in get_config_cache().get_host_config(hostname).tags:
778 if tag.startswith("site:"):
779 return site == tag[5:]
780 # hosts without a site: tag belong to all sites
781 return True
784 def alias_of(hostname, fallback):
785 aliases = host_extra_conf(hostname, extra_host_conf.get("alias", []))
786 if len(aliases) == 0:
787 if fallback:
788 return fallback
790 return hostname
792 return aliases[0]
795 def get_additional_ipaddresses_of(hostname):
796 #TODO Regarding the following configuration variables from WATO
797 # there's no inheritance, thus we use 'host_attributes'.
798 # Better would be to use cmk_base configuration variables,
799 # eg. like 'management_protocol'.
800 return (host_attributes.get(hostname, {}).get("additional_ipv4addresses", []),
801 host_attributes.get(hostname, {}).get("additional_ipv6addresses", []))
804 def parents_of(hostname):
805 par = host_extra_conf(hostname, parents)
806 # Use only those parents which are defined and active in
807 # all_hosts.
808 used_parents = []
809 for p in par:
810 ps = p.split(",")
811 for pss in ps:
812 if pss in all_active_realhosts():
813 used_parents.append(pss)
814 return used_parents
817 # If host is node of one or more clusters, return a list of the cluster host names.
818 # If not, return an empty list.
819 # TODO: Replace call sites with HostConfig access and remove this
820 def clusters_of(hostname):
821 return get_config_cache().get_host_config(hostname).part_of_clusters
825 # IPv4/IPv6
829 # TODO: Replace call sites with HostConfig access and remove this
830 def is_ipv6_primary(hostname):
831 return get_config_cache().get_host_config(hostname).is_ipv6_primary
834 # TODO: Replace call sites with HostConfig access and remove this
835 def is_ipv4v6_host(hostname):
836 return get_config_cache().get_host_config(hostname).is_ipv4v6_host
839 # TODO: Replace call sites with HostConfig access and remove this
840 def is_ipv6_host(hostname):
841 return get_config_cache().get_host_config(hostname).is_ipv6_host
844 # TODO: Replace call sites with HostConfig access and remove this
845 def is_ipv4_host(hostname):
846 return get_config_cache().get_host_config(hostname).is_ipv4_host
849 # TODO: Replace call sites with HostConfig access and remove this
850 def is_no_ip_host(hostname):
851 return get_config_cache().get_host_config(hostname).is_no_ip_host
855 # Management board
859 def management_address_of(hostname):
860 attributes_of_host = host_attributes.get(hostname, {})
861 if attributes_of_host.get("management_address"):
862 return attributes_of_host["management_address"]
864 return ipaddresses.get(hostname)
867 def management_credentials_of(hostname):
868 protocol = get_config_cache().get_host_config(hostname).management_protocol
869 if protocol == "snmp":
870 credentials_variable, default_value = management_snmp_credentials, snmp_default_community
871 elif protocol == "ipmi":
872 credentials_variable, default_value = management_ipmi_credentials, None
873 elif protocol is None:
874 return None
875 else:
876 raise NotImplementedError()
878 # First try to use the explicit configuration of the host
879 # (set directly for a host or via folder inheritance in WATO)
880 try:
881 return credentials_variable[hostname]
882 except KeyError:
883 pass
885 # If a rule matches, use the first rule for the management board protocol of the host
886 rule_settings = host_extra_conf(hostname, management_board_config)
887 for rule_protocol, credentials in rule_settings:
888 if rule_protocol == protocol:
889 return credentials
891 return default_value
895 # Agent communication
899 def agent_port_of(hostname):
900 ports = host_extra_conf(hostname, agent_ports)
901 if len(ports) == 0:
902 return agent_port
904 return ports[0]
907 def tcp_connect_timeout_of(hostname):
908 timeouts = host_extra_conf(hostname, tcp_connect_timeouts)
909 if len(timeouts) == 0:
910 return tcp_connect_timeout
912 return timeouts[0]
915 def agent_encryption_of(hostname):
916 settings = host_extra_conf(hostname, agent_encryption)
917 if settings:
918 return settings[0]
920 return {'use_regular': 'disable', 'use_realtime': 'enforce'}
923 def agent_target_version(hostname):
924 agent_target_versions = host_extra_conf(hostname, check_mk_agent_target_versions)
925 if agent_target_versions:
926 spec = agent_target_versions[0]
927 if spec == "ignore":
928 return None
929 elif spec == "site":
930 return cmk.__version__
931 elif isinstance(spec, str):
932 # Compatibility to old value specification format (a single version string)
933 return spec
934 elif spec[0] == 'specific':
935 return spec[1]
937 return spec # return the whole spec in case of an "at least version" config
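# Illustrative examples of the value formats handled above (rule values are
# made up; the "at_least" dict layout is an assumption):
#
#     "ignore"                            -> None (no version check)
#     "site"                              -> cmk.__version__
#     "1.5.0p3"                           -> "1.5.0p3" (old plain-string format)
#     ("specific", "1.5.0p3")             -> "1.5.0p3"
#     ("at_least", {"release": "1.5.0"})  -> returned unchanged as the whole spec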
941 # Explicit custom variables
943 def get_explicit_service_custom_variables(hostname, description):
944 try:
945 return explicit_service_custom_variables[(hostname, description)]
946 except KeyError:
947 return {}
951 # SNMP
955 # Determine the SNMP community for a specific host. If the host is found
956 # in the map snmp_communities, that community is returned. Otherwise
957 # the snmp_default_community is returned (which is preset with
958 # "public", but can be overridden in main.mk).
959 def snmp_credentials_of(hostname):
960 try:
961 return explicit_snmp_communities[hostname]
962 except KeyError:
963 pass
965 communities = host_extra_conf(hostname, snmp_communities)
966 if len(communities) > 0:
967 return communities[0]
969 # nothing configured for this host -> use default
970 return snmp_default_community
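# Illustrative lookup order (values are made up): an explicit community set for
# the host wins, then the first matching snmp_communities rule, then the default.
#
#     explicit_snmp_communities = {"switch01": "s3cret"}
#     snmp_credentials_of("switch01")  # -> "s3cret"
#     snmp_credentials_of("switch02")  # -> first matching rule value, else snmp_default_community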
973 def snmp_character_encoding_of(hostname):
974 entries = host_extra_conf(hostname, snmp_character_encodings)
975 if len(entries) > 0:
976 return entries[0]
979 def snmp_timing_of(hostname):
980 timing = host_extra_conf(hostname, snmp_timing)
981 if len(timing) > 0:
982 return timing[0]
983 return {}
986 def snmpv3_contexts_of(hostname):
987 return host_extra_conf(hostname, snmpv3_contexts)
990 def oid_range_limits_of(hostname):
991 return host_extra_conf(hostname, snmp_limit_oid_range)
994 def snmp_port_of(hostname):
995 # type: (str) -> int
996 ports = host_extra_conf(hostname, snmp_ports)
997 if len(ports) == 0:
998 return 161
999 return ports[0]
1002 def is_bulkwalk_host(hostname):
1003 # type: (str) -> bool
1004 if bulkwalk_hosts:
1005 return in_binary_hostlist(hostname, bulkwalk_hosts)
1007 return False
1010 def bulk_walk_size_of(hostname):
1011 bulk_sizes = host_extra_conf(hostname, snmp_bulk_size)
1012 if not bulk_sizes:
1013 return 10
1015 return bulk_sizes[0]
1018 def is_snmpv2or3_without_bulkwalk_host(hostname):
1019 return in_binary_hostlist(hostname, snmpv2c_hosts)
1022 # TODO: Replace call sites with HostConfig access and remove this
1023 def is_usewalk_host(hostname):
1024 return get_config_cache().get_host_config(hostname).is_usewalk_host
1027 def is_inline_snmp_host(hostname):
1028 # TODO: Better use "inline_snmp" once we have moved the code into its own module
1029 has_inline_snmp = "netsnmp" in sys.modules
1030 return has_inline_snmp and use_inline_snmp \
1031 and not in_binary_hostlist(hostname, non_inline_snmp_hosts)
1035 # Groups
1039 def hostgroups_of(hostname):
1040 return host_extra_conf(hostname, host_groups)
1043 def summary_hostgroups_of(hostname):
1044 return host_extra_conf(hostname, summary_host_groups)
1047 def contactgroups_of(hostname):
1048 cgrs = []
1050 # host_contactgroups may take single values as well as
1051 # lists as item value. Of all list entries only the first
1052 # one is used. The single-contact-groups entries are all
1053 # recognized.
1054 first_list = True
1055 for entry in host_extra_conf(hostname, host_contactgroups):
1056 if isinstance(entry, list) and first_list:
1057 cgrs += entry
1058 first_list = False
1059 else:
1060 cgrs.append(entry)
1062 if monitoring_core == "nagios" and enable_rulebased_notifications:
1063 cgrs.append("check-mk-notify")
1065 return list(set(cgrs))
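# Illustrative example (rule values are made up): only the first list-valued
# match contributes all of its entries, plain string matches are always added.
#
#     # host_extra_conf(...) returning [["linux-admins"], "all-admins"]
#     # results in ["linux-admins", "all-admins"] (order not guaranteed, set-based)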
1069 # Misc
1073 def exit_code_spec(hostname, data_source_id=None):
1074 spec = {}
1075 specs = host_extra_conf(hostname, check_mk_exit_status)
1076 for entry in specs[::-1]:
1077 spec.update(entry)
1078 return _get_exit_code_spec(spec, data_source_id)
1081 def _get_exit_code_spec(spec, data_source_id):
1082 if data_source_id is not None:
1083 try:
1084 return spec["individual"][data_source_id]
1085 except KeyError:
1086 pass
1088 try:
1089 return spec["overall"]
1090 except KeyError:
1091 pass
1093 # Old configuration format
1094 return spec
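# Illustrative example of the newer check_mk_exit_status format (keys and
# values are made up): "overall" applies to all data sources, "individual"
# overrides per data source id.
#
#     spec = {"overall": {"connection": 2}, "individual": {"piggyback": {"empty_output": 0}}}
#     _get_exit_code_spec(spec, "piggyback")  # -> {"empty_output": 0}
#     _get_exit_code_spec(spec, None)         # -> {"connection": 2}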
1097 def check_period_of(hostname, service):
1098 periods = get_config_cache().service_extra_conf(hostname, service, check_periods)
1099 if periods:
1100 period = periods[0]
1101 if period == "24X7":
1102 return None
1104 return period
1106 return None
1109 def check_interval_of(hostname, section_name):
1110 if not cmk_base.check_utils.is_snmp_check(section_name):
1111 return # no values at all for non snmp checks
1113 # Prior to 1.5 "match" could be a check name (including subchecks) instead of
1114 # only main check names -> section names. This has been cleaned up, but we still
1115 # need to be compatible. Strip off the sub check part of "match".
1116 for match, minutes in host_extra_conf(hostname, snmp_check_interval):
1117 if match is None or match.split(".")[0] == section_name:
1118 return minutes # use first match
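# Illustrative rule format (values are made up): snmp_check_interval rules map
# a section name (or None for all sections) to a check interval in minutes.
#
#     # host_extra_conf(...) returning [("if", 240)] means: fetch the "if"
#     # SNMP section only every 240 minutes; other sections keep the default.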
1122 # .--Cluster-------------------------------------------------------------.
1123 # | ____ _ _ |
1124 # | / ___| |_ _ ___| |_ ___ _ __ |
1125 # | | | | | | | / __| __/ _ \ '__| |
1126 # | | |___| | |_| \__ \ || __/ | |
1127 # | \____|_|\__,_|___/\__\___|_| |
1128 # | |
1129 # +----------------------------------------------------------------------+
1130 # | Code dealing with clusters (virtual hosts that are used to deal with |
1131 # | services that can move between physical nodes. |
1132 # '----------------------------------------------------------------------'
1135 # Checks whether or not the given host is a cluster host
1136 def is_cluster(hostname):
1137 # all_configured_clusters() needs to be used, because this function affects
1138 # the agent bakery, which needs all configured hosts instead of just the hosts
1139 # of this site
1140 return hostname in all_configured_clusters()
1143 # Returns the nodes of a cluster, or None if hostname is not a cluster
1144 def nodes_of(hostname):
1145 return get_config_cache().nodes_of(hostname)
1148 # Determine whether a service (found on a physical host) is a clustered
1149 # service and - if yes - return the cluster host of the service. If
1150 # no, returns the hostname of the physical host.
1151 def host_of_clustered_service(hostname, servicedesc, part_of_clusters=None):
1152 return get_config_cache().host_of_clustered_service(
1153 hostname, servicedesc, part_of_clusters=part_of_clusters)
1157 # .--Services------------------------------------------------------------.
1158 # | ____ _ |
1159 # | / ___| ___ _ ____ _(_) ___ ___ ___ |
1160 # | \___ \ / _ \ '__\ \ / / |/ __/ _ \/ __| |
1161 # | ___) | __/ | \ V /| | (_| __/\__ \ |
1162 # | |____/ \___|_| \_/ |_|\___\___||___/ |
1163 # | |
1164 # +----------------------------------------------------------------------+
1165 # | Service related helper functions |
1166 # '----------------------------------------------------------------------'
1168 # Renaming of service descriptions while keeping backward compatibility with
1169 # existing installations.
1170 # Synchronize with htdocs/wato.py and plugins/wato/check_mk_configuration.py!
1173 # Cleanup! .. some day
1174 def _get_old_cmciii_temp_description(item):
1175 if "Temperature" in item:
1176 return False, item # old item format, no conversion
1178 parts = item.split(" ")
1179 if parts[0] == "Ambient":
1180 return False, "%s Temperature" % parts[1]
1182 elif len(parts) == 2:
1183 return False, "%s %s.Temperature" % (parts[1], parts[0])
1185 else:
1186 if parts[1] == "LCP":
1187 parts[1] = "Liquid_Cooling_Package"
1188 return False, "%s %s.%s-Temperature" % (parts[1], parts[0], parts[2])
1191 _old_service_descriptions = {
1192 "df": "fs_%s",
1193 "df_netapp": "fs_%s",
1194 "df_netapp32": "fs_%s",
1195 "esx_vsphere_datastores": "fs_%s",
1196 "hr_fs": "fs_%s",
1197 "vms_diskstat.df": "fs_%s",
1198 "zfsget": "fs_%s",
1199 "ps": "proc_%s",
1200 "ps.perf": "proc_%s",
1201 "wmic_process": "proc_%s",
1202 "services": "service_%s",
1203 "logwatch": "LOG %s",
1204 "logwatch.groups": "LOG %s",
1205 "hyperv_vm": "hyperv_vms",
1206 "ibm_svc_mdiskgrp": "MDiskGrp %s",
1207 "ibm_svc_system": "IBM SVC Info",
1208 "ibm_svc_systemstats.diskio": "IBM SVC Throughput %s Total",
1209 "ibm_svc_systemstats.iops": "IBM SVC IOPS %s Total",
1210 "ibm_svc_systemstats.disk_latency": "IBM SVC Latency %s Total",
1211 "ibm_svc_systemstats.cache": "IBM SVC Cache Total",
1212 "mknotifyd": "Notification Spooler %s",
1213 "mknotifyd.connection": "Notification Connection %s",
1214 "casa_cpu_temp": "Temperature %s",
1215 "cmciii.temp": _get_old_cmciii_temp_description,
1216 "cmciii.psm_current": "%s",
1217 "cmciii_lcp_airin": "LCP Fanunit Air IN",
1218 "cmciii_lcp_airout": "LCP Fanunit Air OUT",
1219 "cmciii_lcp_water": "LCP Fanunit Water %s",
1220 "etherbox.temp": "Sensor %s",
1221 # While using the old description, don't append the item, even when discovered
1222 # with the new check which creates an item.
1223 "liebert_bat_temp": lambda item: (False, "Battery Temp"),
1224 "nvidia.temp": "Temperature NVIDIA %s",
1225 "ups_bat_temp": "Temperature Battery %s",
1226 "innovaphone_temp": lambda item: (False, "Temperature"),
1227 "enterasys_temp": lambda item: (False, "Temperature"),
1228 "raritan_emx": "Rack %s",
1229 "raritan_pdu_inlet": "Input Phase %s",
1230 "postfix_mailq": lambda item: (False, "Postfix Queue"),
1231 "nullmailer_mailq": lambda item: (False, "Nullmailer Queue"),
1232 "barracuda_mailqueues": lambda item: (False, "Mail Queue"),
1233 "qmail_stats": lambda item: (False, "Qmail Queue"),
1234 "mssql_backup": "%s Backup",
1235 "mssql_counters.cache_hits": "%s",
1236 "mssql_counters.transactions": "%s Transactions",
1237 "mssql_counters.locks": "%s Locks",
1238 "mssql_counters.sqlstats": "%s",
1239 "mssql_counters.pageactivity": "%s Page Activity",
1240 "mssql_counters.locks_per_batch": "%s Locks per Batch",
1241 "mssql_counters.file_sizes": "%s File Sizes",
1242 "mssql_databases": "%s Database",
1243 "mssql_datafiles": "Datafile %s",
1244 "mssql_tablespaces": "%s Sizes",
1245 "mssql_transactionlogs": "Transactionlog %s",
1246 "mssql_versions": "%s Version",
1247 "mssql_blocked_sessions": lambda item: (False, "MSSQL Blocked Sessions"),
1251 def service_description(hostname, check_plugin_name, item):
1252 if check_plugin_name not in check_info:
1253 if item:
1254 return "Unimplemented check %s / %s" % (check_plugin_name, item)
1255 return "Unimplemented check %s" % check_plugin_name
1257 # use user-supplied service description, if available
1258 add_item = True
1259 descr_format = service_descriptions.get(check_plugin_name)
1260 if not descr_format:
1261 # handle renaming for backward compatibility
1262 if check_plugin_name in _old_service_descriptions and \
1263 check_plugin_name not in use_new_descriptions_for:
1265 # Can be a function to generate the old description more flexibly.
1266 old_descr = _old_service_descriptions[check_plugin_name]
1267 if callable(old_descr):
1268 add_item, descr_format = old_descr(item)
1269 else:
1270 descr_format = old_descr
1272 else:
1273 descr_format = check_info[check_plugin_name]["service_description"]
1275 if isinstance(descr_format, str):
1276 descr_format = descr_format.decode("utf-8")
1278 # Note: we strip the service description (remove spaces).
1279 # One check defines "Pages %s" as a description, but the item
1280 # can be empty in some cases. Nagios silently drops leading
1281 # and trailing spaces in the configuration file.
1282 if add_item and isinstance(item, six.string_types + (numbers.Integral,)):
1283 if "%s" not in descr_format:
1284 descr_format += " %s"
1285 descr = descr_format % (item,)
1286 else:
1287 descr = descr_format
1289 if "%s" in descr:
1290 raise MKGeneralException("Found '%%s' in service description (Host: %s, Check type: %s, Item: %s). "
1291 "Please try to rediscover the service to fix this issue." % \
1292 (hostname, check_plugin_name, item))
1294 return get_final_service_description(hostname, descr)
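# Illustrative examples (host and item names are made up; assumes the
# corresponding check plugins are loaded): as long as "df" is not listed in
# use_new_descriptions_for, the legacy format from _old_service_descriptions
# is used.
#
#     service_description("web01", "df", "/var")    # -> u"fs_/var"
#     service_description("web01", "uptime", None)  # -> u"Uptime"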
1297 _old_active_check_service_descriptions = {
1298 "http": lambda params: (params[0][1:] if params[0].startswith("^") else "HTTP %s" % params[0])
1302 def active_check_service_description(hostname, active_check_name, params):
1303 if active_check_name not in active_check_info:
1304 return "Unimplemented check %s" % active_check_name
1306 if (active_check_name in _old_active_check_service_descriptions and
1307 active_check_name not in use_new_descriptions_for):
1308 description = _old_active_check_service_descriptions[active_check_name](params)
1309 else:
1310 act_info = active_check_info[active_check_name]
1311 description = act_info["service_description"](params)
1313 description = description.replace('$HOSTNAME$', hostname)
1315 return get_final_service_description(hostname, description)
1318 def get_final_service_description(hostname, description):
1319 translations = get_service_translations(hostname)
1320 if translations:
1321 # Translate
1322 description = cmk.utils.translations.translate_service_description(
1323 translations, description)
1325 # Sanitize; Remove illegal characters from a service description
1326 description = description.strip()
1327 cache = cmk_base.config_cache.get_dict("final_service_description")
1328 try:
1329 new_description = cache[description]
1330 except KeyError:
1331 new_description = "".join(
1332 [c for c in description if c not in nagios_illegal_chars]).rstrip("\\")
1333 cache[description] = new_description
1335 return new_description
1338 def service_ignored(hostname, check_plugin_name, description):
1339 if check_plugin_name and check_plugin_name in ignored_checktypes:
1340 return True
1342 if check_plugin_name and _checktype_ignored_for_host(hostname, check_plugin_name):
1343 return True
1345 if description is not None \
1346 and get_config_cache().in_boolean_serviceconf_list(hostname, description, ignored_services):
1347 return True
1349 return False
1352 def _checktype_ignored_for_host(host, checktype):
1353 if checktype in ignored_checktypes:
1354 return True
1355 ignored = host_extra_conf(host, ignored_checks)
1356 for e in ignored:
1357 if checktype == e or (isinstance(e, list) and checktype in e):
1358 return True
1359 return False
1362 # TODO: Make this use the generic "rulesets" functions
1363 # a) This function has never been configurable via WATO (see https://mathias-kettner.de/checkmk_service_dependencies.html)
1364 # b) It only affects the Nagios core - CMC does not implement service dependencies
1365 # c) This function implements a specific regex match+replace mechanism which makes it incompatible with
1366 # regular service rulesets. Therefore service_extra_conf() can not easily be used :-/
1367 def service_depends_on(hostname, servicedesc):
1368 """Return a list of services this services depends upon"""
1369 deps = []
1370 config_cache = get_config_cache()
1371 for entry in service_dependencies:
1372 entry, rule_options = get_rule_options(entry)
1373 if rule_options.get("disabled"):
1374 continue
1376 if len(entry) == 3:
1377 depname, hostlist, patternlist = entry
1378 tags = []
1379 elif len(entry) == 4:
1380 depname, tags, hostlist, patternlist = entry
1381 else:
1382 raise MKGeneralException("Invalid entry '%r' in service dependencies: "
1383 "must have 3 or 4 entries" % entry)
1385 if hosttags_match_taglist(config_cache.tag_list_of_host(hostname), tags) and \
1386 in_extraconf_hostlist(hostlist, hostname):
1387 for pattern in patternlist:
1388 matchobject = regex(pattern).search(servicedesc)
1389 if matchobject:
1390 try:
1391 item = matchobject.groups()[-1]
1392 deps.append(depname % item)
1393 except:
1394 deps.append(depname)
1395 return deps
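# Illustrative example (rule is made up): the %s in the dependency name is
# filled with the last regex group matched in the service description.
#
#     service_dependencies = [("NFS mount %s", ALL_HOSTS, ["fs_(.*)"])]
#     service_depends_on("web01", "fs_/data")  # -> ["NFS mount /data"]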
1399 # .--Misc Helpers--------------------------------------------------------.
1400 # | __ __ _ _ _ _ |
1401 # | | \/ (_)___ ___ | | | | ___| |_ __ ___ _ __ ___ |
1402 # | | |\/| | / __|/ __| | |_| |/ _ \ | '_ \ / _ \ '__/ __| |
1403 # | | | | | \__ \ (__ | _ | __/ | |_) | __/ | \__ \ |
1404 # | |_| |_|_|___/\___| |_| |_|\___|_| .__/ \___|_| |___/ |
1405 # | |_| |
1406 # +----------------------------------------------------------------------+
1407 # | Different helper functions |
1408 # '----------------------------------------------------------------------'
1411 def is_cmc():
1412 """Whether or not the site is currently configured to use the Microcore."""
1413 return monitoring_core == "cmc"
1416 def decode_incoming_string(s, encoding="utf-8"):
1417 try:
1418 return s.decode(encoding)
1419 except:
1420 return s.decode(fallback_agent_output_encoding)
1423 def translate_piggyback_host(sourcehost, backedhost):
1424 translation = _get_piggyback_translations(sourcehost)
1426 # To make it possible to match umlauts we need to change the hostname
1427 # to a unicode string which can then be matched with regexes etc.
1428 # We assume the incoming name is correctly encoded in UTF-8
1429 backedhost = decode_incoming_string(backedhost)
1431 translated = cmk.utils.translations.translate_hostname(translation, backedhost)
1433 return translated.encode('utf-8') # change back to UTF-8 encoded string
1436 def _get_piggyback_translations(hostname):
1437 """Get a dict that specifies the actions to be done during the hostname translation"""
1438 rules = host_extra_conf(hostname, piggyback_translation)
1439 translations = {}
1440 for rule in rules[::-1]:
1441 translations.update(rule)
1442 return translations
1445 def get_service_translations(hostname):
1446 translations_cache = cmk_base.config_cache.get_dict("service_description_translations")
1447 if hostname in translations_cache:
1448 return translations_cache[hostname]
1450 rules = host_extra_conf(hostname, service_description_translation)
1451 translations = {}
1452 for rule in rules[::-1]:
1453 for k, v in rule.items():
1454 if isinstance(v, list):
1455 translations.setdefault(k, set())
1456 translations[k] |= set(v)
1457 else:
1458 translations[k] = v
1460 translations_cache[hostname] = translations
1461 return translations
1464 def prepare_check_command(command_spec, hostname, description):
1465 """Prepares a check command for execution by Check_MK.
1467 This function either accepts a string or a list of arguments as
1468 command_spec. In case a list is given it quotes the single elements. It
1469 also prepares password store entries for the command line. These entries
1470 will be completed by the executed program later to get the password from
1471 the password store.
1472 """
1473 if isinstance(command_spec, six.string_types):
1474 return command_spec
1476 if not isinstance(command_spec, list):
1477 raise NotImplementedError()
1479 passwords, formated = [], []
1480 for arg in command_spec:
1481 arg_type = type(arg)
1483 if arg_type in [int, float]:
1484 formated.append("%s" % arg)
1486 elif arg_type in [str, unicode]:
1487 formated.append(cmk_base.utils.quote_shell_string(arg))
1489 elif arg_type == tuple and len(arg) == 3:
1490 pw_ident, preformated_arg = arg[1:]
1491 try:
1492 password = stored_passwords[pw_ident]["password"]
1493 except KeyError:
1494 if hostname and description:
1495 descr = " used by service \"%s\" on host \"%s\"" % (description, hostname)
1496 elif hostname:
1497 descr = " used by host host \"%s\"" % (hostname)
1498 else:
1499 descr = ""
1501 console.warning(
1502 "The stored password \"%s\"%s does not exist (anymore)." % (pw_ident, descr))
1503 password = "%%%"
1505 pw_start_index = str(preformated_arg.index("%s"))
1506 formated.append(
1507 cmk_base.utils.quote_shell_string(preformated_arg % ("*" * len(password))))
1508 passwords.append((str(len(formated)), pw_start_index, pw_ident))
1510 else:
1511 raise MKGeneralException("Invalid argument for command line: %r" % (arg,))
1513 if passwords:
1514 formated = ["--pwstore=%s" % ",".join(["@".join(p) for p in passwords])] + formated
1516 return " ".join(formated)
1519 def get_http_proxy(http_proxy):
1520 # type: (Tuple) -> Optional[str]
1521 """Returns proxy URL to be used for HTTP requests
1523 Pass a value configured by the user using the HTTPProxyReference valuespec to this function
1524 and you will get back either a proxy URL, an empty string to enforce no proxy usage or None
1525 to use the proxy configuration from the process environment.
1526 """
1527 if not isinstance(http_proxy, tuple):
1528 return None
1530 proxy_type, value = http_proxy
1532 if proxy_type == "environment":
1533 return None
1535 if proxy_type == "global":
1536 return http_proxies.get(value, {}).get("proxy_url", None)
1538 if proxy_type == "url":
1539 return value
1541 if proxy_type == "no_proxy":
1542 return ""
1544 return None
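# Illustrative examples of the accepted tuple values (the "global" entry name
# is made up):
#
#     get_http_proxy(("environment", None))         # -> None (use process environment)
#     get_http_proxy(("no_proxy", None))            # -> ""   (enforce no proxy)
#     get_http_proxy(("url", "http://proxy:3128"))  # -> "http://proxy:3128"
#     get_http_proxy(("global", "my_proxy"))        # -> http_proxies["my_proxy"]["proxy_url"] if configured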
1548 # .--Host rulesets-------------------------------------------------------.
1549 # | _ _ _ _ _ |
1550 # | | | | | ___ ___| |_ _ __ _ _| | ___ ___ ___| |_ ___ |
1551 # | | |_| |/ _ \/ __| __| | '__| | | | |/ _ \/ __|/ _ \ __/ __| |
1552 # | | _ | (_) \__ \ |_ | | | |_| | | __/\__ \ __/ |_\__ \ |
1553 # | |_| |_|\___/|___/\__| |_| \__,_|_|\___||___/\___|\__|___/ |
1554 # | |
1555 # +----------------------------------------------------------------------+
1556 # | Host ruleset matching |
1557 # '----------------------------------------------------------------------'
1560 def host_extra_conf(hostname, ruleset):
1561 return get_config_cache().host_extra_conf(hostname, ruleset)
1564 def host_extra_conf_merged(hostname, conf):
1565 return get_config_cache().host_extra_conf_merged(hostname, conf)
1569 # .--Host matching-------------------------------------------------------.
1570 # | _ _ _ _ _ _ |
1571 # | | | | | ___ ___| |_ _ __ ___ __ _| |_ ___| |__ (_)_ __ __ _ |
1572 # | | |_| |/ _ \/ __| __| | '_ ` _ \ / _` | __/ __| '_ \| | '_ \ / _` | |
1573 # | | _ | (_) \__ \ |_ | | | | | | (_| | || (__| | | | | | | | (_| | |
1574 # | |_| |_|\___/|___/\__| |_| |_| |_|\__,_|\__\___|_| |_|_|_| |_|\__, | |
1575 # | |___/ |
1576 # +----------------------------------------------------------------------+
1577 # | Code for calculating the host condition matching of rules |
1578 # '----------------------------------------------------------------------'
1581 def all_matching_hosts(tags, hostlist, with_foreign_hosts):
1582 return get_config_cache().all_matching_hosts(tags, hostlist, with_foreign_hosts)
1585 def in_extraconf_hostlist(hostlist, hostname):
1586 """Whether or not the given host matches the hostlist.
1588 Entries in list are hostnames that must equal the hostname.
1589 Expressions beginning with ! are negated: if they match,
1590 the item is excluded from the list.
1592 Expressions beginning with ~ are treated as regular expression.
1593 Also the three special tags '@all', '@clusters', '@physical'
1594 are allowed.
1595 """
1597 # Migration help: print error if old format appears in config file
1598 # FIXME: When can this be removed?
1599 try:
1600 if hostlist[0] == "":
1601 raise MKGeneralException('Invalid empty entry [ "" ] in configuration')
1602 except IndexError:
1603 pass # Empty list, no problem.
1605 for hostentry in hostlist:
1606 if hostentry == '':
1607 raise MKGeneralException('Empty hostname in host list %r' % hostlist)
1608 negate = False
1609 use_regex = False
1610 if hostentry[0] == '@':
1611 if hostentry == '@all':
1612 return True
1613 ic = is_cluster(hostname)
1614 if hostentry == '@cluster' and ic:
1615 return True
1616 elif hostentry == '@physical' and not ic:
1617 return True
1619 # Allow negation of hostentry with prefix '!'
1620 else:
1621 if hostentry[0] == '!':
1622 hostentry = hostentry[1:]
1623 negate = True
1625 # Allow regex with prefix '~'
1626 if hostentry[0] == '~':
1627 hostentry = hostentry[1:]
1628 use_regex = True
1630 try:
1631 if not use_regex and hostname == hostentry:
1632 return not negate
1633 # Handle Regex. Note: hostname == True -> generic unknown host
1634 elif use_regex and hostname != True:
1635 if regex(hostentry).match(hostname) is not None:
1636 return not negate
1637 except MKGeneralException:
1638 if cmk.utils.debug.enabled():
1639 raise
1641 return False
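# Illustrative examples (host names are made up):
#
#     in_extraconf_hostlist(["web01"], "web01")            # -> True  (exact match)
#     in_extraconf_hostlist(["~web.*"], "web02")           # -> True  (regex match)
#     in_extraconf_hostlist(["!web03", "@all"], "web03")   # -> False (negated entry wins)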
1644 def in_binary_hostlist(hostname, conf):
1645 return get_config_cache().in_binary_hostlist(hostname, conf)
1648 def parse_host_rule(rule):
1649 rule, rule_options = get_rule_options(rule)
1651 num_elements = len(rule)
1652 if num_elements == 2:
1653 item, hostlist = rule
1654 tags = []
1655 elif num_elements == 3:
1656 item, tags, hostlist = rule
1657 else:
1658 raise MKGeneralException("Invalid entry '%r' in host configuration list: must "
1659 "have 2 or 3 entries" % (rule,))
1661 return item, tags, hostlist, rule_options
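# Illustrative example (rule values are made up): a 3-element rule with an
# attached options dict is split into its parts.
#
#     parse_host_rule(("some value", ["lan"], ALL_HOSTS, {"comment": "example"}))
#     # -> ("some value", ["lan"], ALL_HOSTS, {"comment": "example"})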
1664 def get_rule_options(entry):
1665 """Get the options from a rule.
1667 Pick out the option element of a rule. Currently the options "disabled"
1668 and "comments" are being honored."""
1669 if isinstance(entry[-1], dict):
1670 return entry[:-1], entry[-1]
1672 return entry, {}
1675 def hosttags_match_taglist(hosttags, required_tags):
1676 """Check if a host fulfills the requirements of a tag list.
1678 The host must have all tags in the list, except
1679 for those negated with '!'. Those the host must *not* have!
1680 A trailing + means a prefix match."""
1681 for tag in required_tags:
1682 negate, tag = _parse_negated(tag)
1683 if tag and tag[-1] == '+':
1684 tag = tag[:-1]
1685 matches = False
1686 for t in hosttags:
1687 if t.startswith(tag):
1688 matches = True
1689 break
1691 else:
1692 matches = tag in hosttags
1694 if matches == negate:
1695 return False
1697 return True
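# Illustrative example (tags are made up): all required tags must be present,
# '!' negates and a trailing '+' does a prefix match.
#
#     hosttags_match_taglist(["lan", "prod", "site:central"],
#                            ["prod", "!test", "site:+"])   # -> True
#     hosttags_match_taglist(["lan", "test"], ["prod"])      # -> False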
1700 def _parse_negated(pattern):
1701 # Allow negation of pattern with prefix '!'
1702 try:
1703 negate = pattern[0] == '!'
1704 if negate:
1705 pattern = pattern[1:]
1706 except IndexError:
1707 negate = False
1709 return negate, pattern
1712 # Converts a regex pattern which is used to e.g. match services within Check_MK
1713 # to a function reference to a matching function which takes one parameter to
1714 # perform the matching and returns a two item tuple where the first element
1715 # tells whether or not the pattern is negated and the second element the outcome
1716 # of the match.
1717 # This function tries to parse the pattern and return different kind of matching
1718 # functions which can then be performed faster than just using the regex match.
1719 def _convert_pattern(pattern):
1720 def is_infix_string_search(pattern):
1721 return pattern.startswith('.*') and not is_regex(pattern[2:])
1723 def is_exact_match(pattern):
1724 return pattern[-1] == '$' and not is_regex(pattern[:-1])
1726 def is_prefix_match(pattern):
1727 return pattern[-2:] == '.*' and not is_regex(pattern[:-2])
1729 if pattern == '':
1730 return False, lambda txt: True # empty patterns match always
1732 negate, pattern = _parse_negated(pattern)
1734 if is_exact_match(pattern):
1735 # Exact string match
1736 return negate, lambda txt: pattern[:-1] == txt
1738 elif is_infix_string_search(pattern):
1739 # Using regex to search a substring within text
1740 return negate, lambda txt: pattern[2:] in txt
1742 elif is_prefix_match(pattern):
1743 # prefix match with trailing .*
1744 pattern = pattern[:-2]
1745 return negate, lambda txt: txt[:len(pattern)] == pattern
1747 elif is_regex(pattern):
1748 # Non specific regex. Use real prefix regex matching
1749 return negate, lambda txt: regex(pattern).match(txt) is not None
1751 # prefix match without any regex chars
1752 return negate, lambda txt: txt[:len(pattern)] == pattern
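# Editor's note - how some hypothetical patterns are classified by _convert_pattern():
#
#     "Interface 1$"  -> exact comparison against "Interface 1"
#     ".*eth0"        -> substring search for "eth0"
#     "CPU .*"        -> prefix comparison against "CPU "
#     "!Memory"       -> negated prefix comparison against "Memory"
#     "fs_/var"       -> plain prefix comparison (no regex characters involved)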
1755 def _convert_pattern_list(patterns):
1756 return tuple([_convert_pattern(p) for p in patterns])
1759 # Slow variant of checking whether a service is matched by a list
1760 # of regexes - used e.g. by cmk --notify
1761 def in_extraconf_servicelist(servicelist, service):
1762 return _in_servicematcher_list(_convert_pattern_list(servicelist), service)
1765 def _in_servicematcher_list(service_matchers, item):
1766 for negate, func in service_matchers:
1767 result = func(item)
1768 if result:
1769 return not negate
1771 # no match in list -> negative answer
1772 return False
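# Editor's note - illustrative use of the slow list matcher (service names are hypothetical):
#
#     in_extraconf_servicelist(["CPU load", "Memory$"], "CPU load average")  -> True   (prefix match)
#     in_extraconf_servicelist(["Memory$"], "Memory used")                   -> False  (exact match fails)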
1776 # .--Constants-----------------------------------------------------------.
1777 # | ____ _ _ |
1778 # | / ___|___ _ __ ___| |_ __ _ _ __ | |_ ___ |
1779 # | | | / _ \| '_ \/ __| __/ _` | '_ \| __/ __| |
1780 # | | |__| (_) | | | \__ \ || (_| | | | | |_\__ \ |
1781 # | \____\___/|_| |_|___/\__\__,_|_| |_|\__|___/ |
1782 # | |
1783 # +----------------------------------------------------------------------+
1784 # | Some constants to be used in the configuration and at other places |
1785 # '----------------------------------------------------------------------'
1787 # Convenience macros for legacy tuple based host and service rules
1788 # TODO: Deprecate these in a gentle way
1789 PHYSICAL_HOSTS = cmk.utils.rulesets.tuple_rulesets.PHYSICAL_HOSTS
1790 CLUSTER_HOSTS = cmk.utils.rulesets.tuple_rulesets.CLUSTER_HOSTS
1791 ALL_HOSTS = cmk.utils.rulesets.tuple_rulesets.ALL_HOSTS
1792 ALL_SERVICES = cmk.utils.rulesets.tuple_rulesets.ALL_SERVICES
1793 NEGATE = cmk.utils.rulesets.tuple_rulesets.NEGATE
1795 # TODO: Cleanup access to check_info[] -> replace it by different function calls
1796 # like for example check_exists(...)
1798 # BE AWARE: sync these global data structures with
1799 # _initialize_data_structures()
1800 # TODO: Refactor this.
1802 # The checks are loaded into this dictionary:
1803 _check_contexts = {} # type: Dict[str, Any]
1804 # Each check has a separate sub-dictionary, named by the check name.
1805 # It is populated with the includes and the check itself.
1807 # The following data structures will be filled by the checks
1808 # all known checks
1809 check_info = {} # type: Dict[str, Union[Tuple[Any], Dict[str, Any]]]
1810 # library files needed by checks
1811 check_includes = {} # type: Dict[str, List[Any]]
1812 # optional functions for parameter precompilation
1813 precompile_params = {} # type: Dict[str, Callable[[str, str, Dict[str, Any]], Any]]
1814 # dictionary-configured checks declare their default level variables here
1815 check_default_levels = {} # type: Dict[str, Any]
1816 # factory settings for dictionary-configured checks
1817 factory_settings = {} # type: Dict[str, Dict[str, Any]]
1818 # variables (names) in checks/* needed for check itself
1819 check_config_variables = [] # type: List[Any]
1820 # which OIDs to fetch for which check (for tabular information)
1821 snmp_info = {} # type: Dict[str, Union[Tuple[Any], List[Tuple[Any]]]]
1822 # SNMP autodetection
1823 snmp_scan_functions = {} # type: Dict[str, Callable[[Callable[[str], str]], bool]]
1824 # definitions of active "legacy" checks
1825 active_check_info = {} # type: Dict[str, Dict[str, Any]]
1826 special_agent_info = {
1827 } # type: Dict[str, Callable[[Dict[str, Any], str, str], Union[str, List[str]]]]
1829 # Names of variables registered in the check files. This is used to
1830 # keep track of the variables needed by each file. Those variables are then
1831 # (if available) read from the config and applied to the checks module after
1832 # reading in the configuration of the user.
1833 _check_variables = {} # type: Dict[str, List[Any]]
1834 # keeps the default values of all the check variables
1835 _check_variable_defaults = {} # type: Dict[str, Any]
1836 _all_checks_loaded = False
1838 # workaround: set of check-groups that are to be treated as service-checks even if
1839 # the item is None
1840 service_rule_groups = set(["temperature"])
1843 # .--Loading-------------------------------------------------------------.
1844 # | _ _ _ |
1845 # | | | ___ __ _ __| (_)_ __ __ _ |
1846 # | | | / _ \ / _` |/ _` | | '_ \ / _` | |
1847 # | | |__| (_) | (_| | (_| | | | | | (_| | |
1848 # | |_____\___/ \__,_|\__,_|_|_| |_|\__, | |
1849 # | |___/ |
1850 # +----------------------------------------------------------------------+
1851 # | Loading of check plugins |
1852 # '----------------------------------------------------------------------'
1855 def load_all_checks(get_check_api_context):
1856 """Load all checks and includes"""
1857 global _all_checks_loaded
1859 _initialize_data_structures()
1860 filelist = get_plugin_paths(cmk.utils.paths.local_checks_dir, cmk.utils.paths.checks_dir)
1861 load_checks(get_check_api_context, filelist)
1863 _all_checks_loaded = True
1866 def _initialize_data_structures():
1867 """Initialize some data structures which are populated while loading the checks"""
1868 global _all_checks_loaded
1869 _all_checks_loaded = False
1871 _check_variables.clear()
1872 _check_variable_defaults.clear()
1874 _check_contexts.clear()
1875 check_info.clear()
1876 check_includes.clear()
1877 precompile_params.clear()
1878 check_default_levels.clear()
1879 factory_settings.clear()
1880 del check_config_variables[:]
1881 snmp_info.clear()
1882 snmp_scan_functions.clear()
1883 active_check_info.clear()
1884 special_agent_info.clear()
1887 def get_plugin_paths(*dirs):
1888 filelist = []
1889 for directory in dirs:
1890 filelist += _plugin_pathnames_in_directory(directory)
1891 return filelist
1894 # Now read in all checks. Note: this is done *before* reading the
1895 # configuration, because checks define variables with default values.
1896 # The user can override those variables in his configuration.
1897 # If a check or check.include is found both in local/ and in the
1898 # normal structure, then only the file in local/ must be read!
1899 def load_checks(get_check_api_context, filelist):
1900 cmk_global_vars = set(get_variable_names())
1902 loaded_files = set()
1904 for f in filelist:
1905 if f[0] == "." or f[-1] == "~":
1906 continue # ignore editor backup / temp files
1908 file_name = os.path.basename(f)
1909 if file_name in loaded_files:
1910 continue # skip already loaded files (e.g. from local)
1912 try:
1913 check_context = new_check_context(get_check_api_context)
1915 known_vars = check_context.keys()
1916 known_checks = check_info.keys()
1917 known_active_checks = active_check_info.keys()
1919 load_check_includes(f, check_context)
1921 load_precompiled_plugin(f, check_context)
1922 loaded_files.add(file_name)
1924 except MKTerminate:
1925 raise
1927 except Exception as e:
1928 console.error("Error in plugin file %s: %s\n", f, e)
1929 if cmk.utils.debug.enabled():
1930 raise
1931 else:
1932 continue
1934 new_checks = set(check_info.keys()).difference(known_checks)
1935 new_active_checks = set(active_check_info.keys()).difference(known_active_checks)
1937 # Now store the check context for all checks found in this file
1938 for check_plugin_name in new_checks:
1939 _check_contexts[check_plugin_name] = check_context
1941 for check_plugin_name in new_active_checks:
1942 _check_contexts[check_plugin_name] = check_context
1944 # Collect all variables that the check file did introduce compared to the
1945 # default check context
1946 new_check_vars = {}
1947 for varname in set(check_context.keys()).difference(known_vars):
1948 new_check_vars[varname] = check_context[varname]
1950 # The default_levels_variable of check_info also declares use of a global
1951 # variable. Register it here for this context.
1952 for check_plugin_name in new_checks:
1953 # The check_info is not converted yet (convert_check_info()). This means we need
1954 # to deal with old style tuple configured checks
1955 if isinstance(check_info[check_plugin_name], tuple):
1956 default_levels_varname = check_default_levels.get(check_plugin_name)
1957 else:
1958 default_levels_varname = check_info[check_plugin_name].get(
1959 "default_levels_variable")
1961 if default_levels_varname:
1962 # Add the initial configuration to the check context to have a consistent state
1963 check_context[default_levels_varname] = factory_settings.get(
1964 default_levels_varname, {})
1965 new_check_vars[default_levels_varname] = check_context[default_levels_varname]
1967 # Save the check variables so that e.g. after loading the configuration
1968 # their values can be added to the check contexts
1969 for varname, value in new_check_vars.items():
1970 # Do not allow checks to override Check_MK builtin global variables. Silently
1971 # skip them here. The variables will only be locally available to the checks.
1972 if varname in cmk_global_vars:
1973 continue
1975 if varname.startswith("_"):
1976 continue
1978 if inspect.isfunction(value) or inspect.ismodule(value):
1979 continue
1981 _check_variable_defaults[varname] = value
1983 # Keep track of which variable needs to be set to which context
1984 context_ident_list = _check_variables.setdefault(varname, [])
1985 context_ident_list += new_checks
1986 context_ident_list += new_active_checks
1988 # Now convert check_info to new format.
1989 convert_check_info()
1990 verify_checkgroup_members()
1991 initialize_check_type_caches()
1994 def all_checks_loaded():
1995 """Whether or not all(!) checks have been loaded into the current process"""
1996 return _all_checks_loaded
1999 def any_check_loaded():
2000 """Whether or not some checks have been loaded into the current process"""
2001 return bool(_check_contexts)
2004 # Constructs a new check context dictionary. It contains the whole check API.
2005 def new_check_context(get_check_api_context):
2006 # Add the data structures where the checks register with Check_MK
2007 context = {
2008 "check_info": check_info,
2009 "check_includes": check_includes,
2010 "precompile_params": precompile_params,
2011 "check_default_levels": check_default_levels,
2012 "factory_settings": factory_settings,
2013 "check_config_variables": check_config_variables,
2014 "snmp_info": snmp_info,
2015 "snmp_scan_functions": snmp_scan_functions,
2016 "active_check_info": active_check_info,
2017 "special_agent_info": special_agent_info,
2018 }
2019 # NOTE: For better separation it would be better to copy the values, but
2020 # this might consume too much memory, so we simply reference them.
2021 context.update(get_check_api_context())
2022 return context
2025 # Load the definitions of the required include files for this check
2026 # Working with imports when specifying the includes would be much cleaner,
2027 # sure. But we need to deal with the current check API.
2028 def load_check_includes(check_file_path, check_context):
2029 for include_file_name in cached_includes_of_plugin(check_file_path):
2030 include_file_path = check_include_file_path(include_file_name)
2031 try:
2032 load_precompiled_plugin(include_file_path, check_context)
2033 except MKTerminate:
2034 raise
2036 except Exception as e:
2037 console.error("Error in check include file %s: %s\n", include_file_path, e)
2038 if cmk.utils.debug.enabled():
2039 raise
2040 else:
2041 continue
2044 def check_include_file_path(include_file_name):
2045 local_path = os.path.join(cmk.utils.paths.local_checks_dir, include_file_name)
2046 if os.path.exists(local_path):
2047 return local_path
2048 return os.path.join(cmk.utils.paths.checks_dir, include_file_name)
2051 def cached_includes_of_plugin(check_file_path):
2052 cache_file_path = _include_cache_file_path(check_file_path)
2053 try:
2054 return _get_cached_check_includes(check_file_path, cache_file_path)
2055 except OSError:
2056 pass # No usable cache. Recompute the includes below
2058 includes = includes_of_plugin(check_file_path)
2059 _write_check_include_cache(cache_file_path, includes)
2060 return includes
2063 def _get_cached_check_includes(check_file_path, cache_file_path):
2064 check_stat = os.stat(check_file_path)
2065 cache_stat = os.stat(cache_file_path)
2067 if check_stat.st_mtime >= cache_stat.st_mtime:
2068 raise OSError("Cache is too old")
2070 # There are no includes (just the newline at the end)
2071 if cache_stat.st_size == 1:
2072 return [] # No includes
2074 # store.save_file() creates the file empty for locking (in case it does not exist).
2075 # Skip loading the file.
2076 # Note: When raising here this process will also write the file. This means it
2077 # will write it another time after it was written by the other process. This
2078 # could be optimized. Since the whole caching here is a temporary(tm) solution,
2079 # we leave it as it is.
2080 if cache_stat.st_size == 0:
2081 raise OSError("Cache generation in progress (file is locked)")
2083 x = open(cache_file_path).read().strip()
2084 if not x:
2085 return [] # Shouldn't happen. Empty files are handled above
2086 return x.split("|")
2089 def _write_check_include_cache(cache_file_path, includes):
2090 store.makedirs(os.path.dirname(cache_file_path))
2091 store.save_file(cache_file_path, "%s\n" % "|".join(includes))
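# Editor's note - the cache file written above simply holds the include names joined
# by "|" plus a trailing newline. A hypothetical file content would be:
#
#     df.include|size_trend.include
#
# An empty include list results in a file containing only the newline (st_size == 1),
# which _get_cached_check_includes() interprets as "no includes".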
2094 def _include_cache_file_path(path):
2095 is_local = path.startswith(cmk.utils.paths.local_checks_dir)
2096 return os.path.join(cmk.utils.paths.include_cache_dir, "local" if is_local else "builtin",
2097 os.path.basename(path))
2100 # Parse the check file without executing the code to find the check include
2101 # files the check uses. The following statements are extracted:
2102 # check_info[...] = { "includes": [...] }
2103 # inv_info[...] = { "includes": [...] }
2104 # check_includes[...] = [...]
2105 def includes_of_plugin(check_file_path):
2106 include_names = OrderedDict()
2108 def _load_from_check_info(node):
2109 if not isinstance(node.value, ast.Dict):
2110 return
2112 for key, val in zip(node.value.keys, node.value.values):
2113 if key.s == "includes":
2114 if isinstance(val, ast.List):
2115 for element in val.elts:
2116 include_names[element.s] = True
2117 else:
2118 raise MKGeneralException("Includes must be a list of include file names, "
2119 "found '%s'" % type(val))
2121 def _load_from_check_includes(node):
2122 if isinstance(node.value, ast.List):
2123 for element in node.value.elts:
2124 include_names[element.s] = True
2126 tree = ast.parse(open(check_file_path).read())
2127 for child in ast.iter_child_nodes(tree):
2128 if not isinstance(child, ast.Assign):
2129 continue # We only care about top level assigns
2131 # Filter out assignments to check_info dictionary
2132 for target in child.targets:
2133 if isinstance(target, ast.Subscript) and isinstance(target.value, ast.Name):
2134 if target.value.id in ["check_info", "inv_info"]:
2135 _load_from_check_info(child)
2136 elif target.value.id == "check_includes":
2137 _load_from_check_includes(child)
2139 return include_names.keys()
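# Editor's note - a hypothetical check file illustrating the statements extracted above:
#
#     check_info["foo"] = {
#         "includes": ["df.include"],
#     }
#     check_includes["foo"] = ["size_trend.include"]
#
# For such a file includes_of_plugin() returns ["df.include", "size_trend.include"]
# (duplicates removed, order preserved via the OrderedDict).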
2142 def _plugin_pathnames_in_directory(path):
2143 if path and os.path.exists(path):
2144 return sorted([
2145 path + "/" + f
2146 for f in os.listdir(path)
2147 if not f.startswith(".") and not f.endswith(".include")
2148 ])
2149 return []
2152 def load_precompiled_plugin(path, check_context):
2153 """Loads the given check or check include plugin into the given
2154 check context.
2156 To improve loading speed the files are not read directly. They are
2157 byte-code compiled first in case this has not been done yet. If there
2158 is already a compiled file that is newer than the source file, then
2159 the precompiled file is loaded."""
2161 precompiled_path = _precompiled_plugin_path(path)
2163 if not _is_plugin_precompiled(path, precompiled_path):
2164 console.vverbose("Precompile %s to %s\n" % (path, precompiled_path))
2165 store.makedirs(os.path.dirname(precompiled_path))
2166 py_compile.compile(path, precompiled_path, doraise=True)
2168 exec (marshal.loads(open(precompiled_path, "rb").read()[8:]), check_context)
2171 def _is_plugin_precompiled(path, precompiled_path):
2172 if not os.path.exists(precompiled_path):
2173 return False
2175 # Check precompiled file header
2176 f = open(precompiled_path, "rb")
2178 file_magic = f.read(4)
2179 if file_magic != py_compile.MAGIC:
2180 return False
2182 try:
2183 origin_file_mtime = struct.unpack("I", f.read(4))[0]
2184 except struct.error:
2185 return False
2187 if long(os.stat(path).st_mtime) != origin_file_mtime:
2188 return False
2190 return True
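# Editor's note: the header check above relies on the CPython 2 .pyc layout, where the
# first 4 bytes are the magic number and the following 4 bytes are the source mtime as
# an unsigned 32 bit integer. load_precompiled_plugin() skips exactly these 8 header
# bytes before unmarshalling the code object.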
2193 def _precompiled_plugin_path(path):
2194 is_local = path.startswith(cmk.utils.paths.local_checks_dir)
2195 return os.path.join(cmk.utils.paths.precompiled_checks_dir, "local" if is_local else "builtin",
2196 os.path.basename(path))
2199 def check_variable_names():
2200 return _check_variables.keys()
2203 def get_check_variable_defaults():
2204 """Returns the check variable default settings. These are the settings right
2205 after loading the checks."""
2206 return _check_variable_defaults
2209 def set_check_variables(check_variables):
2210 """Update the check related config variables in the relevant check contexts"""
2211 for varname, value in check_variables.items():
2212 for context_ident in _check_variables[varname]:
2213 _check_contexts[context_ident][varname] = value
2216 def get_check_variables():
2217 """Returns the currently effective check variable settings
2219 Since the variables are only stored in the individual check contexts and not stored
2220 in a central place, this function needs to collect the values from the check contexts.
2221 We assume a single variable has the same value in all relevant contexts, which means
2222 that it is enough to get the variable from the first context."""
2223 check_config = {}
2224 for varname, context_ident_list in _check_variables.iteritems():
2225 check_config[varname] = _check_contexts[context_ident_list[0]][varname]
2226 return check_config
2229 def get_check_context(check_plugin_name):
2230 """Returns the context dictionary of the given check plugin"""
2231 return _check_contexts[check_plugin_name]
2234 # FIXME: Clear / unset all legacy variables to prevent confusions in other code trying to
2235 # use the legacy variables which are not set by newer checks.
2236 def convert_check_info():
2237 check_info_defaults = {
2238 "check_function": None,
2239 "inventory_function": None,
2240 "parse_function": None,
2241 "group": None,
2242 "snmp_info": None,
2243 "snmp_scan_function": None,
2244 "handle_empty_info": False,
2245 "handle_real_time_checks": False,
2246 "default_levels_variable": None,
2247 "node_info": False,
2248 "extra_sections": [],
2249 "service_description": None,
2250 "has_perfdata": False,
2251 "management_board": None,
2252 }
2254 for check_plugin_name, info in check_info.items():
2255 section_name = cmk_base.check_utils.section_name_of(check_plugin_name)
2257 if not isinstance(info, dict):
2258 # Convert check declaration from old style to new API
2259 check_function, descr, has_perfdata, inventory_function = info
2261 scan_function = snmp_scan_functions.get(check_plugin_name,
2262 snmp_scan_functions.get(section_name))
2264 check_info[check_plugin_name] = {
2265 "check_function": check_function,
2266 "service_description": descr,
2267 "has_perfdata": bool(has_perfdata),
2268 "inventory_function": inventory_function,
2269 # Insert check name as group if no group is being defined
2270 "group": check_plugin_name,
2271 "snmp_info": snmp_info.get(check_plugin_name),
2272 # Sometimes the scan function is assigned to the check_plugin_name
2273 # rather than to the base name.
2274 "snmp_scan_function": scan_function,
2275 "handle_empty_info": False,
2276 "handle_real_time_checks": False,
2277 "default_levels_variable": check_default_levels.get(check_plugin_name),
2278 "node_info": False,
2279 "parse_function": None,
2280 "extra_sections": [],
2281 "management_board": None,
2282 }
2283 else:
2284 # Ensure that there are only the known keys set. Is meant to detect typos etc.
2285 for key in info.keys():
2286 if key != "includes" and key not in check_info_defaults:
2287 raise MKGeneralException(
2288 "The check '%s' declares an unexpected key '%s' in 'check_info'." %
2289 (check_plugin_name, key))
2291 # The check already uses the new API. Make sure that all expected keys
2292 # are present by filling in the defaults from check_info_defaults.
2293 for key, val in check_info_defaults.items():
2294 info.setdefault(key, val)
2296 # Include files are related to the check file (= the section_name),
2297 # not to the (sub-)check. So we keep them in check_includes.
2298 check_includes.setdefault(section_name, [])
2299 check_includes[section_name] += info.get("includes", [])
2301 # Make sure that setting for node_info of check and subcheck matches
2302 for check_plugin_name, info in check_info.iteritems():
2303 if "." in check_plugin_name:
2304 section_name = cmk_base.check_utils.section_name_of(check_plugin_name)
2305 if section_name not in check_info:
2306 if info["node_info"]:
2307 raise MKGeneralException(
2308 "Invalid check implementation: node_info for %s is "
2309 "True, but base check %s not defined" % (check_plugin_name, section_name))
2311 elif check_info[section_name]["node_info"] != info["node_info"]:
2312 raise MKGeneralException(
2313 "Invalid check implementation: node_info for %s "
2314 "and %s are different." % ((section_name, check_plugin_name)))
2316 # Now gather snmp_info and snmp_scan_function back to the
2317 # original arrays. Note: this information is tied to an "agent section",
2318 # not to a check. Several checks may use the same SNMP info and scan function.
2319 for check_plugin_name, info in check_info.iteritems():
2320 section_name = cmk_base.check_utils.section_name_of(check_plugin_name)
2321 if info["snmp_info"] and section_name not in snmp_info:
2322 snmp_info[section_name] = info["snmp_info"]
2324 if info["snmp_scan_function"] and section_name not in snmp_scan_functions:
2325 snmp_scan_functions[section_name] = info["snmp_scan_function"]
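# Editor's note - an illustrative conversion ("foo" and its functions are hypothetical):
#
#     check_info["foo"] = (check_foo, "Foo %s", 1, inventory_foo)
#
# is normalized by convert_check_info() into
#
#     check_info["foo"] = {
#         "check_function": check_foo,
#         "service_description": "Foo %s",
#         "has_perfdata": True,
#         "inventory_function": inventory_foo,
#         "group": "foo",
#     }
#
# with the remaining keys (snmp_info, snmp_scan_function, default_levels_variable,
# node_info, ...) filled as shown in the tuple branch above.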
2328 # This function validates that the checks which are members of a checkgroup
2329 # either all have an item or none of them does. Mixed checkgroups lead to strange
2330 # exceptions when processing the check parameters. So it is much better to catch
2331 # these errors in a central place with a clear error message.
2332 def verify_checkgroup_members():
2333 groups = checks_by_checkgroup()
2335 for group_name, check_entries in groups.items():
2336 with_item, without_item = [], []
2337 for check_plugin_name, check_info_entry in check_entries:
2338 # Trying to detect whether or not the check has an item. But this mechanism is not
2339 # 100% reliable since Check_MK appends an item to the service_description when "%s"
2340 # is not in the check's service_description template.
2341 # Maybe we need to define a new rule which requires the developer to use the %s in
2342 # the service_description. At least for grouped checks.
2343 if "%s" in check_info_entry["service_description"]:
2344 with_item.append(check_plugin_name)
2345 else:
2346 without_item.append(check_plugin_name)
2348 if with_item and without_item:
2349 raise MKGeneralException(
2350 "Checkgroup %s has checks with and without item! At least one of "
2351 "the checks in this group needs to be changed (With item: %s, "
2352 "Without item: %s)" % (group_name, ", ".join(with_item), ", ".join(without_item)))
2355 def checks_by_checkgroup():
2356 groups = {}
2357 for check_plugin_name, check in check_info.items():
2358 group_name = check["group"]
2359 if group_name:
2360 groups.setdefault(group_name, [])
2361 groups[group_name].append((check_plugin_name, check))
2362 return groups
2365 # These caches both only hold the base names of the checks
2366 def initialize_check_type_caches():
2367 snmp_cache = cmk_base.runtime_cache.get_set("check_type_snmp")
2368 snmp_cache.update(snmp_info.keys())
2370 tcp_cache = cmk_base.runtime_cache.get_set("check_type_tcp")
2371 for check_plugin_name in check_info:
2372 section_name = cmk_base.check_utils.section_name_of(check_plugin_name)
2373 if section_name not in snmp_cache:
2374 tcp_cache.add(section_name)
2378 # .--Helpers-------------------------------------------------------------.
2379 # | _ _ _ |
2380 # | | | | | ___| |_ __ ___ _ __ ___ |
2381 # | | |_| |/ _ \ | '_ \ / _ \ '__/ __| |
2382 # | | _ | __/ | |_) | __/ | \__ \ |
2383 # | |_| |_|\___|_| .__/ \___|_| |___/ |
2384 # | |_| |
2385 # +----------------------------------------------------------------------+
2386 # | Misc check related helper functions |
2387 # '----------------------------------------------------------------------'
2390 def discoverable_tcp_checks():
2391 types = []
2392 for check_plugin_name, check in check_info.items():
2393 if cmk_base.check_utils.is_tcp_check(check_plugin_name) and check["inventory_function"]:
2394 types.append(check_plugin_name)
2395 return sorted(types)
2398 def discoverable_snmp_checks():
2399 types = []
2400 for check_plugin_name, check in check_info.items():
2401 if cmk_base.check_utils.is_snmp_check(check_plugin_name) and check["inventory_function"]:
2402 types.append(check_plugin_name)
2403 return sorted(types)
2406 # Compute parameters for a check honoring factory settings,
2407 # default settings of the user in main.mk, check_parameters[] and
2408 # the values coded in autochecks (given as parameter params)
2409 def compute_check_parameters(host, checktype, item, params):
2410 if checktype not in check_info: # handle vanished checktype
2411 return None
2413 params = _update_with_default_check_parameters(checktype, params)
2414 params = _update_with_configured_check_parameters(host, checktype, item, params)
2416 return params
2419 def _update_with_default_check_parameters(checktype, params):
2420 # Handle dictionary based checks
2421 def_levels_varname = check_info[checktype].get("default_levels_variable")
2423 # Handle case where parameter is None but the type of the
2424 # default value is a dictionary. This is for example the
2425 # case if a check type has gotten parameters in a new version
2426 # but inventory of the old version left None as a parameter.
2427 # Also from now on we support that the inventory simply puts
2428 # None as a parameter. We convert that to an empty dictionary
2429 # that will be updated with the factory settings and default
2430 # levels, if possible.
2431 if params is None and def_levels_varname:
2432 fs = factory_settings.get(def_levels_varname)
2433 if isinstance(fs, dict):
2434 params = {}
2436 # Honor factory settings for dict-type checks. Merge
2437 # dict type checks with multiple matching rules
2438 if isinstance(params, dict):
2440 # Start with factory settings
2441 if def_levels_varname:
2442 new_params = factory_settings.get(def_levels_varname, {}).copy()
2443 else:
2444 new_params = {}
2446 # Merge user's default settings onto it
2447 check_context = _check_contexts[checktype]
2448 if def_levels_varname and def_levels_varname in check_context:
2449 def_levels = check_context[def_levels_varname]
2450 if isinstance(def_levels, dict):
2451 new_params.update(def_levels)
2453 # Merge params from inventory onto it
2454 new_params.update(params)
2455 params = new_params
2457 return params
2460 def _update_with_configured_check_parameters(host, checktype, item, params):
2461 descr = service_description(host, checktype, item)
2463 config_cache = get_config_cache()
2465 # Get parameters configured via checkgroup_parameters
2466 entries = _get_checkgroup_parameters(config_cache, host, checktype, item)
2468 # Get parameters configured via check_parameters
2469 entries += config_cache.service_extra_conf(host, descr, check_parameters)
2471 if entries:
2472 if _has_timespecific_params(entries):
2473 # some parameters include timespecific settings
2474 # these will be executed just before the check execution
2475 return TimespecificParamList(entries)
2477 # loop from last to first (first must have precedence)
2478 for entry in entries[::-1]:
2479 if isinstance(params, dict) and isinstance(entry, dict):
2480 params.update(entry)
2481 else:
2482 if isinstance(entry, dict):
2483 # The entry still has the reference from the rule..
2484 # If we don't make a deepcopy the rule might be modified by
2485 # a followup params.update(...)
2486 entry = copy.deepcopy(entry)
2487 params = entry
2488 return params
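# Editor's note on the effective merge order for dict based parameters: the helpers
# above start with factory_settings[...], overlay the user's default levels variable
# from the check context, then the discovered autocheck params, and finally the
# matching checkgroup_parameters / check_parameters rule values, where the first
# matching rule has the highest precedence (hence the reversed loop). Rules that
# carry "tp_default_value" short-circuit this merging and are returned as a
# TimespecificParamList instead.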
2491 def _has_timespecific_params(entries):
2492 for entry in entries:
2493 if isinstance(entry, dict) and "tp_default_value" in entry:
2494 return True
2495 return False
2498 def _get_checkgroup_parameters(config_cache, host, checktype, item):
2499 checkgroup = check_info[checktype]["group"]
2500 if not checkgroup:
2501 return []
2502 rules = checkgroup_parameters.get(checkgroup)
2503 if rules is None:
2504 return []
2506 try:
2507 # checks without an item
2508 if item is None and checkgroup not in service_rule_groups:
2509 return config_cache.host_extra_conf(host, rules)
2511 # checks with an item need service-specific rules
2512 return config_cache.service_extra_conf(host, item, rules)
2513 except MKGeneralException as e:
2514 raise MKGeneralException(str(e) + " (on host %s, checktype %s)" % (host, checktype))
2517 def do_status_data_inventory_for(hostname):
2518 rules = active_checks.get('cmk_inv')
2519 if rules is None:
2520 return False
2522 # 'host_extra_conf' is already cached thus we can
2523 # use it after every check cycle.
2524 entries = get_config_cache().host_extra_conf(hostname, rules)
2526 if not entries:
2527 return False # No matching rule -> disable
2529 # Convert legacy rules to current dict format (just like the valuespec)
2530 params = {} if entries[0] is None else entries[0]
2532 return params.get('status_data_inventory', False)
2535 def do_host_label_discovery_for(hostname):
2536 rules = active_checks.get('cmk_inv')
2537 if rules is None:
2538 return True
2540 entries = get_config_cache().host_extra_conf(hostname, rules)
2542 if not entries:
2543 return True # No matching rule -> host label discovery stays enabled
2545 # Convert legacy rules to current dict format (just like the valuespec)
2546 params = {} if entries[0] is None else entries[0]
2548 return params.get("host_label_inventory", True)
2551 def filter_by_management_board(hostname,
2552 found_check_plugin_names,
2553 for_mgmt_board,
2554 for_discovery=False,
2555 for_inventory=False):
2556 """
2557 In order to decide which check is used for which data source
2558 we have to filter the found check plugins. This is done via
2559 the check_info key "management_board". There are three values
2560 with the following meanings:
2561 - MGMT_ONLY
2562 These check plugins
2563 - are only used for management board data sources,
2564 - have the prefix 'mgmt_' in their name,
2565 - have the prefix 'Management Interface:' in their service description.
2566 - If there is an equivalent host check plugin then it must be 'HOST_ONLY'.
2568 - HOST_PRECEDENCE
2569 - Default value for all check plugins.
2570 - It does not have to be declared in the check_info.
2571 - Special situation for SNMP management boards:
2572 - If a host is not a SNMP host these checks are used for
2573 the SNMP management boards.
2574 - If a host is a SNMP host these checks are used for
2575 the host itself.
2577 - HOST_ONLY
2578 These check plugins
2579 - are used for 'real' host data sources, not for host management board data sources
2580 - there is an equivalent 'MGMT_ONLY'-management board check plugin.
2581 """
2583 mgmt_only, host_precedence_snmp, host_only_snmp, host_precedence_tcp, host_only_tcp =\
2584 _get_categorized_check_plugins(found_check_plugin_names, for_inventory=for_inventory)
2586 config_cache = get_config_cache()
2587 host_config = config_cache.get_host_config(hostname)
2589 final_collection = set()
2590 if not host_config.has_management_board:
2591 if host_config.is_snmp_host:
2592 final_collection.update(host_precedence_snmp)
2593 final_collection.update(host_only_snmp)
2594 if host_config.is_agent_host:
2595 final_collection.update(host_precedence_tcp)
2596 final_collection.update(host_only_tcp)
2597 return final_collection
2599 if for_mgmt_board:
2600 final_collection.update(mgmt_only)
2601 if not host_config.is_snmp_host:
2602 final_collection.update(host_precedence_snmp)
2603 if not for_discovery:
2604 # Migration from 1.4 to 1.5:
2605 # in 1.4 TCP hosts with SNMP management boards discovered TCP and
2606 # SNMP checks, eg. uptime and snmp_uptime. During the checking phase
2607 # these checks should still be executed.
2609 # In versions >= 1.5 there are management board specific check
2610 # plugins, eg. mgmt_snmp_uptime.
2611 # After a re-discovery Check_MK finds the uptime check plugin for
2612 # the TCP host and the mgmt_snmp_uptime check for the SNMP
2613 # management board. Moreover Check_MK eliminates 'HOST_ONLY'
2614 # checks like snmp_uptime.
2615 final_collection.update(host_only_snmp)
2617 else:
2618 if host_config.is_snmp_host:
2619 final_collection.update(host_precedence_snmp)
2620 final_collection.update(host_only_snmp)
2621 if host_config.is_agent_host:
2622 final_collection.update(host_precedence_tcp)
2623 final_collection.update(host_only_tcp)
2625 return final_collection
2628 def _get_categorized_check_plugins(check_plugin_names, for_inventory=False):
2629 if for_inventory:
2630 is_snmp_check_f = cmk_base.inventory_plugins.is_snmp_plugin
2631 plugins_info = cmk_base.inventory_plugins.inv_info
2632 else:
2633 is_snmp_check_f = cmk_base.check_utils.is_snmp_check
2634 plugins_info = check_info
2636 mgmt_only = set()
2637 host_precedence_snmp = set()
2638 host_precedence_tcp = set()
2639 host_only_snmp = set()
2640 host_only_tcp = set()
2642 for check_plugin_name in check_plugin_names:
2643 if check_plugin_name not in plugins_info:
2644 msg = "Unknown plugin file %s" % check_plugin_name
2645 if cmk.utils.debug.enabled():
2646 raise MKGeneralException(msg)
2647 else:
2648 console.verbose("%s\n" % msg)
2649 continue
2651 is_snmp_check_ = is_snmp_check_f(check_plugin_name)
2652 mgmt_board = _get_management_board_precedence(check_plugin_name, plugins_info)
2653 if mgmt_board == check_api_utils.HOST_PRECEDENCE:
2654 if is_snmp_check_:
2655 host_precedence_snmp.add(check_plugin_name)
2656 else:
2657 host_precedence_tcp.add(check_plugin_name)
2659 elif mgmt_board == check_api_utils.MGMT_ONLY:
2660 mgmt_only.add(check_plugin_name)
2662 elif mgmt_board == check_api_utils.HOST_ONLY:
2663 if is_snmp_check_:
2664 host_only_snmp.add(check_plugin_name)
2665 else:
2666 host_only_tcp.add(check_plugin_name)
2668 return mgmt_only, host_precedence_snmp, host_only_snmp,\
2669 host_precedence_tcp, host_only_tcp
2672 def _get_management_board_precedence(check_plugin_name, plugins_info):
2673 mgmt_board = plugins_info[check_plugin_name].get("management_board")
2674 if mgmt_board is None:
2675 return check_api_utils.HOST_PRECEDENCE
2676 return mgmt_board
2679 cmk_base.cleanup.register_cleanup(check_api_utils.reset_hostname)
2682 # .--Host Configuration--------------------------------------------------.
2683 # | _ _ _ |
2684 # | | | | | ___ ___| |_ |
2685 # | | |_| |/ _ \/ __| __| |
2686 # | | _ | (_) \__ \ |_ |
2687 # | |_| |_|\___/|___/\__| |
2688 # | |
2689 # | ____ __ _ _ _ |
2690 # | / ___|___ _ __ / _(_) __ _ _ _ _ __ __ _| |_(_) ___ _ __ |
2691 # | | | / _ \| '_ \| |_| |/ _` | | | | '__/ _` | __| |/ _ \| '_ \ |
2692 # | | |__| (_) | | | | _| | (_| | |_| | | | (_| | |_| | (_) | | | | |
2693 # | \____\___/|_| |_|_| |_|\__, |\__,_|_| \__,_|\__|_|\___/|_| |_| |
2694 # | |___/ |
2695 # +----------------------------------------------------------------------+
2698 class HostConfig(object):
2699 def __init__(self, config_cache, hostname):
2700 # type: (ConfigCache, str) -> None
2701 super(HostConfig, self).__init__()
2702 self.hostname = hostname
2704 self._config_cache = config_cache
2706 self.is_cluster = is_cluster(hostname)
2707 self.part_of_clusters = self._config_cache.clusters_of(hostname)
2709 # TODO: Rename self.tags to self.tag_list and self.tag_groups to self.tags
2710 self.tags = self._config_cache.tag_list_of_host(self.hostname)
2711 self.tag_groups = host_tags.get(hostname, {})
2712 self.labels = self._get_host_labels()
2713 self.label_sources = self._get_host_label_sources()
2715 # Basic types
2716 self.is_tcp_host = self._config_cache.in_binary_hostlist(hostname, tcp_hosts)
2717 self.is_snmp_host = self._config_cache.in_binary_hostlist(hostname, snmp_hosts)
2718 self.is_usewalk_host = self._config_cache.in_binary_hostlist(hostname, usewalk_hosts)
2720 if "piggyback" in self.tags:
2721 self.is_piggyback_host = True
2722 elif "no-piggyback" in self.tags:
2723 self.is_piggyback_host = False
2724 else: # Legacy automatic detection
2725 self.is_piggyback_host = self.has_piggyback_data
2727 # Agent types
2728 self.is_agent_host = self.is_tcp_host or self.is_piggyback_host
2729 self.management_protocol = management_protocol.get(hostname)
2730 self.has_management_board = self.management_protocol is not None
2732 self.is_ping_host = not self.is_snmp_host and\
2733 not self.is_agent_host and\
2734 not self.has_management_board
2736 self.is_dual_host = self.is_tcp_host and self.is_snmp_host
2737 self.is_all_agents_host = "all-agents" in self.tags
2738 self.is_all_special_agents_host = "special-agents" in self.tags
2740 # IP addresses
2741 # Whether or not the given host is configured not to be monitored via IP
2742 self.is_no_ip_host = "no-ip" in self.tags
2743 self.is_ipv6_host = "ip-v6" in self.tags
2744 # Whether or not the given host is configured to be monitored via IPv4.
2745 # This is the case when it is set to be explicit IPv4 or implicit (when
2746 # host is not an IPv6 host and not a "No IP" host)
2747 self.is_ipv4_host = "ip-v4" in self.tags or (not self.is_ipv6_host and
2748 not self.is_no_ip_host)
2750 self.is_ipv4v6_host = "ip-v6" in self.tags and "ip-v4" in self.tags
2752 # Whether or not the given host is configured to be monitored primarily via IPv6
2753 self.is_ipv6_primary = (not self.is_ipv4v6_host and self.is_ipv6_host) \
2754 or (self.is_ipv4v6_host and self._primary_ip_address_family_of() == "ipv6")
2756 @property
2757 def has_piggyback_data(self):
2758 if piggyback.has_piggyback_raw_data(piggyback_max_cachefile_age, self.hostname):
2759 return True
2761 from cmk_base.data_sources.piggyback import PiggyBackDataSource
2762 return PiggyBackDataSource(self.hostname, None).has_persisted_agent_sections()
2764 def _primary_ip_address_family_of(self):
2765 rules = self._config_cache.host_extra_conf(self.hostname, primary_address_family)
2766 if rules:
2767 return rules[0]
2768 return "ipv4"
2770 def _get_host_labels(self):
2771 """Returns the effective set of host labels from all available sources
2773 1. Discovered labels
2774 2. Ruleset "Host labels"
2775 3. Explicit labels (via host/folder config)
2777 Last one wins.
2778 """
2779 labels = {}
2780 labels.update(self._discovered_labels_of_host())
2781 labels.update(self._config_cache.host_extra_conf_merged(self.hostname, host_label_rules))
2782 labels.update(host_labels.get(self.hostname, {}))
2783 return labels
2785 def _get_host_label_sources(self):
2786 """Returns the effective set of host label keys with their source identifier instead of the value
2787 Order and merging logic is equal to _get_host_labels()"""
2788 labels = {}
2789 labels.update({k: "discovered" for k in self._discovered_labels_of_host().keys()})
2790 labels.update({k : "ruleset" \
2791 for k in self._config_cache.host_extra_conf_merged(self.hostname, host_label_rules)})
2792 labels.update({k: "explicit" for k in host_labels.get(self.hostname, {}).keys()})
2793 return labels
2795 def _discovered_labels_of_host(self):
2796 # type: () -> Dict
2797 return DiscoveredHostLabelsStore(self.hostname).load()
2801 # .--Configuration Cache-------------------------------------------------.
2802 # | ____ __ _ _ _ |
2803 # | / ___|___ _ __ / _(_) __ _ _ _ _ __ __ _| |_(_) ___ _ __ |
2804 # | | | / _ \| '_ \| |_| |/ _` | | | | '__/ _` | __| |/ _ \| '_ \ |
2805 # | | |__| (_) | | | | _| | (_| | |_| | | | (_| | |_| | (_) | | | | |
2806 # | \____\___/|_| |_|_| |_|\__, |\__,_|_| \__,_|\__|_|\___/|_| |_| |
2807 # | |___/ |
2808 # | ____ _ |
2809 # | / ___|__ _ ___| |__ ___ |
2810 # | | | / _` |/ __| '_ \ / _ \ |
2811 # | | |__| (_| | (__| | | | __/ |
2812 # | \____\__,_|\___|_| |_|\___| |
2813 # | |
2814 # +----------------------------------------------------------------------+
2817 class ConfigCache(object):
2818 def __init__(self):
2819 super(ConfigCache, self).__init__()
2820 self._initialize_caches()
2822 def initialize(self):
2823 self._initialize_caches()
2824 self._collect_hosttags()
2825 self._setup_clusters_nodes_cache()
2827 self._all_processed_hosts = all_active_hosts()
2828 self._all_configured_hosts = all_configured_hosts()
2829 self._initialize_host_lookup()
2831 def _initialize_caches(self):
2832 self.single_host_checks = cmk_base.config_cache.get_dict("single_host_checks")
2833 self.multi_host_checks = cmk_base.config_cache.get_list("multi_host_checks")
2834 self.check_table_cache = cmk_base.config_cache.get_dict("check_tables")
2836 self._cache_is_snmp_check = cmk_base.runtime_cache.get_dict("is_snmp_check")
2837 self._cache_is_tcp_check = cmk_base.runtime_cache.get_dict("is_tcp_check")
2838 self._cache_section_name_of = {}
2840 # Host lookup
2842 # Contains all hostnames which are currently relevant for this cache
2843 # Most of the time all_processed_hosts is similar to all_active_hosts
2844 # However, in a multiprocessing environment all_processed_hosts may only
2845 # contain a reduced set of hosts, since each process handles a subset
2846 self._all_processed_hosts = set()
2847 self._all_configured_hosts = set()
2849 # Reference hostname -> dirname including /
2850 self._host_paths = {}
2851 # Reference dirname -> hosts in this dir including subfolders
2852 self._folder_host_lookup = {}
2853 # All used folders used for various set intersection operations
2854 self._folder_path_set = set()
2856 # Host tags
2857 self._hosttags = {}
2858 self._hosttags_without_folder = {}
2860 # Reference hosttags_without_folder -> list of hosts
2861 # Provides a list of hosts with the same hosttags, excluding the folder
2862 self._hosts_grouped_by_tags = {}
2863 # Reference hostname -> tag group reference
2864 self._host_grouped_ref = {}
2866 # Autochecks cache
2867 self._autochecks_cache = {}
2869 # Cache for all_matching_host
2870 self._all_matching_hosts_match_cache = {}
2872 # Caches for host_extra_conf
2873 self._host_extra_conf_ruleset_cache = {}
2874 self._host_extra_conf_match_cache = {}
2876 # Caches for service_extra_conf
2877 self._service_extra_conf_ruleset_cache = {}
2878 self._service_extra_conf_host_matched_ruleset_cache = {}
2879 self._service_extra_conf_match_cache = {}
2881 # Caches for in_boolean_serviceconf_list
2882 self._in_boolean_service_conf_list_ruleset_cache = {}
2883 self._in_boolean_service_conf_list_match_cache = {}
2885 # Cache for in_binary_hostlist
2886 self._in_binary_hostlist_cache = {}
2888 # Caches for nodes and clusters
2889 self._clusters_of_cache = {}
2890 self._nodes_of_cache = {}
2892 # A factor which indicates how many hosts share the same host tag configuration (excluding folders).
2893 # len(all_processed_hosts) / len(different tag combinations)
2894 # It is used to determine the best rule evaluation method
2895 self._all_processed_hosts_similarity = 1
2897 # Keep HostConfig instances created with the current configuration cache
2898 self._host_configs = {}
2900 def get_host_config(self, hostname):
2901 """Returns a HostConfig instance for the given host
2903 It lazily initializes the host config object and caches the objects during the lifetime
2904 of the ConfigCache."""
2905 host_config = self._host_configs.get(hostname)
2906 if host_config:
2907 return host_config
2909 host_config = self._host_configs[hostname] = HostConfig(self, hostname)
2910 return host_config
2912 def _collect_hosttags(self):
2913 for tagged_host in all_hosts + clusters.keys():
2914 parts = tagged_host.split("|")
2915 self._hosttags[parts[0]] = set(parts[1:])
2917 # TODO: check all call sites and remove this
2918 def tag_list_of_host(self, hostname):
2919 """Returns the list of all configured tags of a host. In case
2920 a host has no tags configured or is not known, it returns an
2921 empty list."""
2922 return self._hosttags.get(hostname, [])
2924 def tags_of_host(self, hostname):
2925 """Returns the dict of all configured tag groups and values of a host"""
2926 return host_tags.get(hostname, {})
2928 def tags_of_service(self, hostname, svc_desc):
2929 """Returns the dict of all configured tags of a service
2930 It takes all explicitly configured tag groups into account.
2931 """
2932 tags = {}
2933 for entry in self.service_extra_conf(hostname, svc_desc, service_tag_rules):
2934 tags.update(entry)
2935 return tags
2937 def labels_of_service(self, hostname, svc_desc):
2938 """Returns the effective set of service labels from all available sources
2940 1. Discovered labels
2941 2. Ruleset "Service labels"
2943 Last one wins.
2944 """
2945 labels = {}
2946 labels.update(self.service_extra_conf_merged(hostname, svc_desc, service_label_rules))
2947 return labels
2949 def label_sources_of_service(self, hostname, svc_desc):
2950 """Returns the effective set of service label keys with their source identifier instead of the value
2951 Order and merging logic is equal to labels_of_service()"""
2952 labels = {}
2953 labels.update({
2954 k: "ruleset"
2955 for k in self.service_extra_conf_merged(hostname, svc_desc, service_label_rules)
2957 return labels
2959 def set_all_processed_hosts(self, all_processed_hosts):
2960 self._all_processed_hosts = set(all_processed_hosts)
2962 nodes_and_clusters = set()
2963 for hostname in self._all_processed_hosts:
2964 nodes_and_clusters.update(self._nodes_of_cache.get(hostname, []))
2965 nodes_and_clusters.update(self._clusters_of_cache.get(hostname, []))
2966 self._all_processed_hosts.update(nodes_and_clusters)
2968 # The folder host lookup includes a list of all -processed- hosts within a given
2969 # folder. Any update via set_all_processed_hosts() invalidates this cache, because
2970 # the scope of relevant hosts has changed. This is -good-, since the values in this
2971 # lookup are iterated one by one later on in all_matching_hosts
2972 self._folder_host_lookup = {}
2974 self._adjust_processed_hosts_similarity()
2976 def _adjust_processed_hosts_similarity(self):
2977 """This function computes the tag similarity of the processed hosts.
2978 The result is a similarity factor, which helps finding the most performant
2979 operation for the current host set."""
2980 used_groups = set()
2981 for hostname in self._all_processed_hosts:
2982 used_groups.add(self._host_grouped_ref[hostname])
2983 self._all_processed_hosts_similarity = (
2984 1.0 * len(self._all_processed_hosts) / len(used_groups))
2986 def _initialize_host_lookup(self):
2987 for hostname in self._all_configured_hosts:
2988 dirname_of_host = os.path.dirname(host_paths[hostname])
2989 if dirname_of_host[-1] != "/":
2990 dirname_of_host += "/"
2991 self._host_paths[hostname] = dirname_of_host
2993 # Determine hosts within folders
2994 dirnames = [
2995 x[0][len(cmk.utils.paths.check_mk_config_dir):] + "/+"
2996 for x in os.walk(cmk.utils.paths.check_mk_config_dir)
2997 ]
2998 self._folder_path_set = set(dirnames)
3000 # Determine hosttags without folder tag
3001 for hostname in self._all_configured_hosts:
3002 tags_without_folder = set(self._hosttags[hostname])
3003 try:
3004 tags_without_folder.remove(self._host_paths[hostname])
3005 except KeyError:
3006 pass
3008 self._hosttags_without_folder[hostname] = tags_without_folder
3010 # Determine hosts with same tag setup (ignoring folder tag)
3011 for hostname in self._all_configured_hosts:
3012 group_ref = tuple(sorted(self._hosttags_without_folder[hostname]))
3013 self._hosts_grouped_by_tags.setdefault(group_ref, set()).add(hostname)
3014 self._host_grouped_ref[hostname] = group_ref
3016 def get_hosts_within_folder(self, folder_path, with_foreign_hosts):
3017 cache_id = with_foreign_hosts, folder_path
3018 if cache_id not in self._folder_host_lookup:
3019 hosts_in_folder = set()
3020 # Strip off "+"
3021 folder_path_tmp = folder_path[:-1]
3022 relevant_hosts = self._all_configured_hosts if with_foreign_hosts else self._all_processed_hosts
3023 for hostname in relevant_hosts:
3024 if self._host_paths[hostname].startswith(folder_path_tmp):
3025 hosts_in_folder.add(hostname)
3026 self._folder_host_lookup[cache_id] = hosts_in_folder
3027 return hosts_in_folder
3028 return self._folder_host_lookup[cache_id]
3030 def get_autochecks_of(self, hostname):
3031 try:
3032 return self._autochecks_cache[hostname]
3033 except KeyError:
3034 result = cmk_base.autochecks.read_autochecks_of(hostname)
3035 self._autochecks_cache[hostname] = result
3036 return result
3038 def section_name_of(self, section):
3039 try:
3040 return self._cache_section_name_of[section]
3041 except KeyError:
3042 section_name = cmk_base.check_utils.section_name_of(section)
3043 self._cache_section_name_of[section] = section_name
3044 return section_name
3046 def is_snmp_check(self, check_plugin_name):
3047 try:
3048 return self._cache_is_snmp_check[check_plugin_name]
3049 except KeyError:
3050 snmp_checks = cmk_base.runtime_cache.get_set("check_type_snmp")
3051 result = self.section_name_of(check_plugin_name) in snmp_checks
3052 self._cache_is_snmp_check[check_plugin_name] = result
3053 return result
3055 def is_tcp_check(self, check_plugin_name):
3056 try:
3057 return self._cache_is_tcp_check[check_plugin_name]
3058 except KeyError:
3059 tcp_checks = cmk_base.runtime_cache.get_set("check_type_tcp")
3060 result = self.section_name_of(check_plugin_name) in tcp_checks
3061 self._cache_is_tcp_check[check_plugin_name] = result
3062 return result
3064 def filter_hosts_with_same_tags_as_host(self, hostname, hosts):
3065 return self._hosts_grouped_by_tags[self._host_grouped_ref[hostname]].intersection(hosts)
3067 def all_matching_hosts(self, tags, hostlist, with_foreign_hosts):
3068 """Returns a set containing the names of hosts that match the given
3069 tags and hostlist conditions."""
3070 cache_id = tuple(tags), tuple(hostlist), with_foreign_hosts
3072 try:
3073 return self._all_matching_hosts_match_cache[cache_id]
3074 except KeyError:
3075 pass
3077 if with_foreign_hosts:
3078 valid_hosts = self._all_configured_hosts
3079 else:
3080 valid_hosts = self._all_processed_hosts
3082 tags_set = set(tags)
3083 tags_set_without_folder = tags_set
3084 rule_path_set = tags_set.intersection(self._folder_path_set)
3085 tags_set_without_folder = tags_set - rule_path_set
3087 if rule_path_set:
3088 # More than one dynamic folder in one rule is simply wrong..
3089 rule_path = list(rule_path_set)[0]
3090 else:
3091 rule_path = "/+"
3093 # Thin out the valid hosts further. If the rule is located in a folder
3094 # we only need the intersection of the folders hosts and the previously determined valid_hosts
3095 valid_hosts = self.get_hosts_within_folder(rule_path,
3096 with_foreign_hosts).intersection(valid_hosts)
3098 # Contains matched hosts
3100 if tags_set_without_folder and hostlist == ALL_HOSTS:
3101 return self._match_hosts_by_tags(cache_id, valid_hosts, tags_set_without_folder)
3103 matching = set([])
3104 only_specific_hosts = not bool([x for x in hostlist if x[0] in ["@", "!", "~"]])
3106 # If no tags are specified and there are only specific hosts we already have the matches
3107 if not tags_set_without_folder and only_specific_hosts:
3108 matching = valid_hosts.intersection(hostlist)
3110 # If no tags are specified and the hostlist includes only @all (all hosts)
3110 elif not tags_set_without_folder and hostlist == ALL_HOSTS:
3111 matching = valid_hosts
3112 else:
3113 # If the rule has only exact host restrictions, we can thin out the list of hosts to check
3114 if only_specific_hosts:
3115 hosts_to_check = valid_hosts.intersection(set(hostlist))
3116 else:
3117 hosts_to_check = valid_hosts
3119 for hostname in hosts_to_check:
3120 # When no tag matching is requested, do not filter by tags. Accept all hosts
3121 # and filter only by hostlist
3122 if (not tags or
3123 hosttags_match_taglist(self._hosttags[hostname], tags_set_without_folder)):
3124 if in_extraconf_hostlist(hostlist, hostname):
3125 matching.add(hostname)
3127 self._all_matching_hosts_match_cache[cache_id] = matching
3128 return matching
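# Editor's note - an illustrative call (tag and host names are hypothetical):
#
#     get_config_cache().all_matching_hosts(["lnx", "!test"], ALL_HOSTS, with_foreign_hosts=False)
#
# returns the set of processed hosts carrying the tag "lnx" but not the tag "test".
# Passing an explicit hostlist such as ["web1", "web2"] instead of ALL_HOSTS
# additionally restricts the result to those host names.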
3130 def _match_hosts_by_tags(self, cache_id, valid_hosts, tags_set_without_folder):
3131 matching = set([])
3132 has_specific_folder_tag = sum([x[0] == "/" for x in tags_set_without_folder])
3133 negative_match_tags = set()
3134 positive_match_tags = set()
3135 for tag in tags_set_without_folder:
3136 if tag[0] == "!":
3137 negative_match_tags.add(tag[1:])
3138 else:
3139 positive_match_tags.add(tag)
3141 if has_specific_folder_tag or self._all_processed_hosts_similarity < 3:
3142 # Without shared folders
3143 for hostname in valid_hosts:
3144 if not positive_match_tags - self._hosttags[hostname]:
3145 if not negative_match_tags.intersection(self._hosttags[hostname]):
3146 matching.add(hostname)
3148 self._all_matching_hosts_match_cache[cache_id] = matching
3149 return matching
3151 # With shared folders
3152 checked_hosts = set()
3153 for hostname in valid_hosts:
3154 if hostname in checked_hosts:
3155 continue
3157 hosts_with_same_tag = self.filter_hosts_with_same_tags_as_host(hostname, valid_hosts)
3158 checked_hosts.update(hosts_with_same_tag)
3160 if not positive_match_tags - self._hosttags[hostname]:
3161 if not negative_match_tags.intersection(self._hosttags[hostname]):
3162 matching.update(hosts_with_same_tag)
3164 self._all_matching_hosts_match_cache[cache_id] = matching
3165 return matching
3167 def host_extra_conf_merged(self, hostname, conf):
3168 rule_dict = {}
3169 for rule in self.host_extra_conf(hostname, conf):
3170 for key, value in rule.items():
3171 rule_dict.setdefault(key, value)
3172 return rule_dict
3174 def host_extra_conf(self, hostname, ruleset):
3175 with_foreign_hosts = hostname not in self._all_processed_hosts
3176 cache_id = id(ruleset), with_foreign_hosts
3177 try:
3178 return self._host_extra_conf_match_cache[cache_id][hostname]
3179 except KeyError:
3180 pass
3182 try:
3183 ruleset = self._host_extra_conf_ruleset_cache[cache_id]
3184 except KeyError:
3185 ruleset = self._convert_host_ruleset(ruleset, with_foreign_hosts)
3186 self._host_extra_conf_ruleset_cache[cache_id] = ruleset
3187 new_cache = {}
3188 for value, hostname_list in ruleset:
3189 for other_hostname in hostname_list:
3190 new_cache.setdefault(other_hostname, []).append(value)
3191 self._host_extra_conf_match_cache[cache_id] = new_cache
3193 if hostname not in self._host_extra_conf_match_cache[cache_id]:
3194 return []
3196 return self._host_extra_conf_match_cache[cache_id][hostname]
3198 def _convert_host_ruleset(self, ruleset, with_foreign_hosts):
3199 new_rules = []
3200 if len(ruleset) == 1 and ruleset[0] == "":
3201 console.warning('deprecated entry [ "" ] in host configuration list')
3203 for rule in ruleset:
3204 item, tags, hostlist, rule_options = parse_host_rule(rule)
3205 if rule_options.get("disabled"):
3206 continue
3208 # Directly compute set of all matching hosts here, this
3209 # will avoid recomputation later
3210 new_rules.append((item, self.all_matching_hosts(tags, hostlist, with_foreign_hosts)))
3212 return new_rules
3214 def service_extra_conf(self, hostname, service, ruleset):
3215 """Compute outcome of a service rule set that has an item."""
3216 # When the requested host is part of the local sites configuration,
3217 # then use only the sites hosts for processing the rules
3218 with_foreign_hosts = hostname not in self._all_processed_hosts
3219 cache_id = id(ruleset), with_foreign_hosts
3221 cached_ruleset = self._service_extra_conf_ruleset_cache.get(cache_id)
3222 if cached_ruleset is None:
3223 cached_ruleset = self._convert_service_ruleset(
3224 ruleset, with_foreign_hosts=with_foreign_hosts)
3225 self._service_extra_conf_ruleset_cache[cache_id] = cached_ruleset
3227 entries = []
3229 for value, hosts, service_matchers in cached_ruleset:
3230 if hostname not in hosts:
3231 continue
3233 descr_cache_id = service_matchers, service
3235 # 20% faster without exception handling
3236 # self._profile_log("descr cache id %r" % (descr_cache_id))
3237 match = self._service_extra_conf_match_cache.get(descr_cache_id)
3238 if match is None:
3239 match = _in_servicematcher_list(service_matchers, service)
3240 self._service_extra_conf_match_cache[descr_cache_id] = match
3242 if match:
3243 entries.append(value)
3245 return entries
3247 def service_extra_conf_merged(self, hostname, service, ruleset):
3248 rule_dict = {}
3249 for rule in self.service_extra_conf(hostname, service, ruleset):
3250 for key, value in rule.items():
3251 rule_dict.setdefault(key, value)
3252 return rule_dict
3254 def _convert_service_ruleset(self, ruleset, with_foreign_hosts):
3255 new_rules = []
3256 for rule in ruleset:
3257 rule, rule_options = get_rule_options(rule)
3258 if rule_options.get("disabled"):
3259 continue
3261 num_elements = len(rule)
3262 if num_elements == 3:
3263 item, hostlist, servlist = rule
3264 tags = []
3265 elif num_elements == 4:
3266 item, tags, hostlist, servlist = rule
3267 else:
3268 raise MKGeneralException("Invalid rule '%r' in service configuration "
3269 "list: must have 3 or 4 elements" % (rule,))
3271 # Directly compute the set of all matching hosts here; this
3272 # avoids recomputing it later
3273 hosts = self.all_matching_hosts(tags, hostlist, with_foreign_hosts)
3275 # And now preprocess the configured patterns in the servlist
3276 new_rules.append((item, hosts, _convert_pattern_list(servlist)))
3278 return new_rules
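# The converted structure cached above is a list of
# (value, set_of_matching_hostnames, preprocessed_service_matchers) triples,
# so service_extra_conf() only needs a set lookup plus a pattern match per rule.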
3280 # Compute the outcome of a service rule set that just says yes/no
3281 def in_boolean_serviceconf_list(self, hostname, descr, ruleset):
3282 # When the requested host is part of the local site's configuration,
3283 # use only this site's hosts for processing the rules
3284 with_foreign_hosts = hostname not in self._all_processed_hosts
3285 cache_id = id(ruleset), with_foreign_hosts
3286 try:
3287 ruleset = self._in_boolean_service_conf_list_ruleset_cache[cache_id]
3288 except KeyError:
3289 ruleset = self._convert_boolean_service_ruleset(ruleset, with_foreign_hosts)
3290 self._in_boolean_service_conf_list_ruleset_cache[cache_id] = ruleset
3292 for negate, hosts, service_matchers in ruleset:
3293 if hostname in hosts:
3294 cache_id = service_matchers, descr
3295 try:
3296 match = self._in_boolean_service_conf_list_match_cache[cache_id]
3297 except KeyError:
3298 match = _in_servicematcher_list(service_matchers, descr)
3299 self._in_boolean_service_conf_list_match_cache[cache_id] = match
3301 if match:
3302 return not negate
3303 return False # no match. Do not ignore
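# Illustrative sketch of a boolean service ruleset (hypothetical entries; the
# accepted layouts follow _convert_boolean_service_ruleset() below, and the
# first matching entry decides):
#
#   _example_ruleset = [
#       (NEGATE, ALL_HOSTS, ["Interface lo$"]),    # negated entry without tags
#       (["lnx"], ALL_HOSTS, ["Interface .*"]),    # tags, hostlist, service patterns
#   ]
#   in_boolean_serviceconf_list("myhost01", "Interface lo", _example_ruleset)
#   # -> False (negated entry matches first)
#   in_boolean_serviceconf_list("myhost01", "Interface eth0", _example_ruleset)
#   # -> True for a host tagged "lnx"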
3305 def _convert_boolean_service_ruleset(self, ruleset, with_foreign_hosts):
3306 new_rules = []
3307 for rule in ruleset:
3308 entry, rule_options = get_rule_options(rule)
3309 if rule_options.get("disabled"):
3310 continue
3312 if entry[0] == NEGATE: # this entry is logically negated
3313 negate = True
3314 entry = entry[1:]
3315 else:
3316 negate = False
3318 if len(entry) == 2:
3319 hostlist, servlist = entry
3320 tags = []
3321 elif len(entry) == 3:
3322 tags, hostlist, servlist = entry
3323 else:
3324 raise MKGeneralException("Invalid entry '%r' in configuration: "
3325 "must have 2 or 3 elements" % (entry,))
3327 # Directly compute the set of all matching hosts here; this
3328 # avoids recomputing it later
3329 hosts = self.all_matching_hosts(tags, hostlist, with_foreign_hosts)
3330 new_rules.append((negate, hosts, _convert_pattern_list(servlist)))
3332 return new_rules
3334 def _setup_clusters_nodes_cache(self):
3335 for cluster, hosts in clusters.items():
3336 clustername = cluster.split('|', 1)[0]
3337 for name in hosts:
3338 self._clusters_of_cache.setdefault(name, []).append(clustername)
3339 self._nodes_of_cache[clustername] = hosts
3341 # Return the list of names of the clusters this host is a node of.
3342 def clusters_of(self, hostname):
3343 return self._clusters_of_cache.get(hostname, [])
3345 # TODO: cleanup none
3346 # Return the nodes of a cluster, or None if hostname is not a cluster.
3347 def nodes_of(self, hostname):
3348 return self._nodes_of_cache.get(hostname)
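# Illustrative cache contents (hypothetical clusters definition; cluster keys
# may carry tag suffixes after "|", which _setup_clusters_nodes_cache() strips):
#
#   clusters = {"db-cluster|lnx": ["db-node1", "db-node2"]}
#   -> clusters_of("db-node1") == ["db-cluster"]
#   -> nodes_of("db-cluster")  == ["db-node1", "db-node2"]
#   -> nodes_of("db-node1")    is None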
3350 # Determine whether a service (found on a physical host) is a clustered
3351 # service and - if so - return the cluster host of the service. If
3352 # not, return the hostname of the physical host.
3353 def host_of_clustered_service(self, hostname, servicedesc, part_of_clusters=None):
3354 if part_of_clusters:
3355 the_clusters = part_of_clusters
3356 else:
3357 the_clusters = self.clusters_of(hostname)
3359 if not the_clusters:
3360 return hostname
3362 cluster_mapping = self.service_extra_conf(hostname, servicedesc, clustered_services_mapping)
3363 for cluster in cluster_mapping:
3364 # Check if the host is in this cluster
3365 if cluster in the_clusters:
3366 return cluster
3368 # 1. New style: explicitly assigned services
3369 for cluster, conf in clustered_services_of.iteritems():
3370 nodes = self.nodes_of(cluster)
3371 if not nodes:
3372 raise MKGeneralException(
3373 "Invalid entry clustered_services_of['%s']: %s is not a cluster." % (cluster,
3374 cluster))
3375 if hostname in nodes and \
3376 self.in_boolean_serviceconf_list(hostname, servicedesc, conf):
3377 return cluster
3379 # 2. Old style: clustered_services assumes that each host belongs to
3380 # exactly one cluster
3381 if self.in_boolean_serviceconf_list(hostname, servicedesc, clustered_services):
3382 return the_clusters[0]
3384 return hostname
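# Resolution order implemented above, summarized:
#   1. clustered_services_mapping rules pointing to one of the host's clusters
#   2. clustered_services_of entries (services explicitly assigned per cluster)
#   3. clustered_services (host assumed to belong to exactly one cluster)
#   4. otherwise the physical host itself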
3386 def in_binary_hostlist(self, hostname, conf):
3387 cache = self._in_binary_hostlist_cache
3389 cache_id = id(conf), hostname
3390 try:
3391 return cache[cache_id]
3392 except KeyError:
3393 pass
3395 # If we have just a list of strings, take it as a list of hostnames
3396 if conf and isinstance(conf[0], str):
3397 result = hostname in conf
3398 cache[cache_id] = result
3399 else:
3400 for entry in conf:
3401 actual_host_tags = self.tag_list_of_host(hostname)
3402 entry, rule_options = get_rule_options(entry)
3403 if rule_options.get("disabled"):
3404 continue
3406 try:
3407 # Negation via 'NEGATE'
3408 if entry[0] == NEGATE:
3409 entry = entry[1:]
3410 negate = True
3411 else:
3412 negate = False
3413 # The entry should be a 1-tuple or 2-tuple whose elements are
3414 # lists of strings. A user might forget the comma in a 1-tuple;
3415 # then the entry is the list itself.
3416 if isinstance(entry, list):
3417 hostlist = entry
3418 tags = []
3419 else:
3420 if len(entry) == 1: # 1-Tuple with list of hosts
3421 hostlist = entry[0]
3422 tags = []
3423 else:
3424 tags, hostlist = entry
3426 if hosttags_match_taglist(actual_host_tags, tags) and \
3427 in_extraconf_hostlist(hostlist, hostname):
3428 cache[cache_id] = not negate
3429 break
3430 except:
3431 # TODO: Fix this too generic catching (+ bad error message)
3432 raise MKGeneralException("Invalid entry '%r' in host configuration list: "
3433 "must be tuple with 1 or 2 entries" % (entry,))
3434 else:
3435 cache[cache_id] = False
3437 return cache[cache_id]
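# Illustrative entry layouts accepted above (hypothetical values):
#
#   conf = ["host1", "host2"]                    # plain list of host names
#   conf = [(["lnx"], ["host1", "host2"])]       # (tags, hostlist) tuples
#   conf = [(NEGATE, ["host3"]), (ALL_HOSTS,)]   # negation and 1-tuples work too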
3440 def get_config_cache():
3441 config_cache = cmk_base.config_cache.get_dict("config_cache")
3442 if not config_cache:
3443 config_cache["cache"] = ConfigCache()
3444 return config_cache["cache"]
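# Illustrative usage (hypothetical caller code; any binary host ruleset from
# the loaded configuration, e.g. bulkwalk_hosts, could stand in here):
#
#   config_cache = config.get_config_cache()
#   if config_cache.in_binary_hostlist("myhost01", config.bulkwalk_hosts):
#       ...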