#!/usr/bin/env python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# |             ____ _               _        __  __ _  __           |
# |            / ___| |__   ___  ___| | __   |  \/  | |/ /           |
# |           | |   | '_ \ / _ \/ __| |/ /   | |\/| | ' /            |
# |           | |___| | | |  __/ (__|   <    | |  | | . \            |
# |            \____|_| |_|\___|\___|_|\_\___|_|  |_|_|\_\           |
# |                                                                  |
# | Copyright Mathias Kettner 2014             mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# tails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
27 """
28 The things in this module specify the official Check_MK check API. Meaning all
29 variables, functions etc. and default modules that are available to checks.
31 Modules available by default (pre imported by Check_MK):
32 collections
33 enum
34 fnmatch
35 functools
36 math
39 socket
40 sys
41 time
42 pprint
44 Global variables:
45 from cmk.utils.regex import regex
46 import cmk.utils.render as render
47 core_state_names Names of states. Usually used to convert numeric states
48 to their name for adding it to the plugin output.
49 The mapping is like this:
51 -1: 'PEND'
52 0: 'OK'
53 1: 'WARN'
54 2: 'CRIT'
55 3: 'UNKN'
57 state_markers Symbolic representations of states in plugin output.
58 Will be displayed colored by the Check_MK GUI.
59 The mapping is like this:
61 0: ''
62 1: '(!)'
63 2: '(!!)'
64 3: '(?)'
66 nagios_illegal_chars Characters not allowed to be used in service
67 descriptions. Can be used in discovery functions to
68 remove unwanted characters from a string. The unwanted
69 chars default are: `;~!$%^&*|\'"<>?,()=
72 OID_BIN TODO
73 OID_END TODO
74 OID_END_BIN TODO
75 OID_END_OCTET_STRING TODO
76 OID_STRING TODO
78 MGMT_ONLY Check is only executed for management boards.
79 HOST_PRECEDENCE Use host address/credentials eg. when it's a SNMP HOST.
80 HOST_ONLY Check is only executed for real SNMP hosts.
82 RAISE Used as value for the "onwrap" argument of the get_rate()
83 function. See get_rate() documentation for details
84 SKIP Used as value for the "onwrap" argument of the get_rate()
85 function. See get_rate() documentation for details
86 ZERO Used as value for the "onwrap" argument of the get_rate()
87 function. See get_rate() documentation for details
88 """ # # pylint: disable=pointless-string-statement

# NOTE: The above suppression is necessary because our testing framework blindly
# concatenates lots of files, including this one.

# We import several modules here for the checks

# TODO: Move imports directly to checks?
import collections  # pylint: disable=unused-import
import enum  # pylint: disable=unused-import
import fnmatch  # pylint: disable=unused-import
import functools
import math  # pylint: disable=unused-import
import os
import re  # pylint: disable=unused-import
import socket  # pylint: disable=unused-import
import sys  # pylint: disable=unused-import
import time
# NOTE: We do not use pprint in this module, but it is part of the check API.
import pprint  # pylint: disable=unused-import

from typing import Callable, Dict, Iterable, List, Optional, Tuple, Union  # pylint: disable=unused-import

import six

import cmk.utils.debug as _debug
import cmk.utils.defines as _defines
import cmk.utils.paths as _paths
from cmk.utils.exceptions import MKGeneralException
from cmk.utils.regex import regex  # pylint: disable=unused-import
import cmk.utils.render as render

# These imports are not meant for use in the API. So we prefix the names
# with an underscore. These names will be skipped when loading into the
# check context.
import cmk_base.utils as _utils
import cmk_base.config as _config
import cmk_base.console as _console  # pylint: disable=unused-import
import cmk_base.snmp_utils as _snmp_utils
import cmk_base.item_state as _item_state
import cmk_base.prediction as _prediction
import cmk_base.check_api_utils as _check_api_utils


def get_check_api_context():
    """This is called from cmk_base code to get the Check API things. Don't
    use this from checks."""
    return {k: v for k, v in globals().items() if not k.startswith("_")}


#.
#   .--Check API-----------------------------------------------------------.
#   |             ____ _               _         _    ____ ___             |
#   |            / ___| |__   ___  ___| | __    / \  |  _ \_ _|            |
#   |           | |   | '_ \ / _ \/ __| |/ /   / _ \ | |_) | |             |
#   |           | |___| | | |  __/ (__|   <   / ___ \|  __/| |             |
#   |            \____|_| |_|\___|\___|_|\_\ /_/   \_\_|  |___|            |
#   |                                                                      |
#   +----------------------------------------------------------------------+
#   |  Helper API for being used in checks                                 |
#   '----------------------------------------------------------------------'

# Names of texts usually output by checks
core_state_names = _defines.short_service_state_names()

# Symbolic representations of states in plugin output
state_markers = _check_api_utils.state_markers

BINARY = _snmp_utils.BINARY
CACHED_OID = _snmp_utils.CACHED_OID

OID_END = _snmp_utils.OID_END
OID_STRING = _snmp_utils.OID_STRING
OID_BIN = _snmp_utils.OID_BIN
OID_END_BIN = _snmp_utils.OID_END_BIN
OID_END_OCTET_STRING = _snmp_utils.OID_END_OCTET_STRING
binstring_to_int = _snmp_utils.binstring_to_int

# Management board checks
MGMT_ONLY = _check_api_utils.MGMT_ONLY  # Check is only executed for management boards (e.g. Management Uptime)
HOST_PRECEDENCE = _check_api_utils.HOST_PRECEDENCE  # Use host address/credentials, e.g. when it's a SNMP host
HOST_ONLY = _check_api_utils.HOST_ONLY  # Check is only executed for real SNMP hosts (e.g. interfaces)

host_name = _check_api_utils.host_name
service_description = _check_api_utils.service_description
check_type = _check_api_utils.check_type

network_interface_scan_registry = _snmp_utils.MutexScanRegistry()


def saveint(i):
    """Tries to cast a string to an integer and return it. In case this
    fails, it returns 0.

    Advice: Please don't use this function in new code. It is understood as
    bad style these days, because in case you get 0 back from this function,
    you can not know whether it is really 0 or something went wrong."""
    try:
        return int(i)
    except:
        return 0


def savefloat(f):
    """Tries to cast a string to a float and return it. In case this fails,
    it returns 0.0.

    Advice: Please don't use this function in new code. It is understood as
    bad style these days, because in case you get 0.0 back from this function,
    you can not know whether it is really 0.0 or something went wrong."""
    try:
        return float(f)
    except:
        return 0.0
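
# A few illustrative calls (the results follow from the fallback behaviour
# implemented above):
#     saveint("42")     -> 42
#     saveint("42.7")   -> 0      (int() rejects the string, so the fallback applies)
#     savefloat("42.7") -> 42.7
#     savefloat(None)   -> 0.0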


# Compatibility wrapper for the pre-1.6 config.service_extra_conf()
def service_extra_conf(hostname, service, ruleset):
    return _config.get_config_cache().service_extra_conf(hostname, service, ruleset)


host_extra_conf = _config.host_extra_conf
in_binary_hostlist = _config.in_binary_hostlist
host_extra_conf_merged = _config.host_extra_conf_merged

# TODO: Only used by logwatch check. Can we clean this up?
get_rule_options = _config.get_rule_options

# These functions were used in some specific checks until 1.6. Don't add them to
# the future check API. They are kept here for compatibility reasons for now.
in_extraconf_hostlist = _config.in_extraconf_hostlist
hosttags_match_taglist = _config.hosttags_match_taglist
all_matching_hosts = _config.all_matching_hosts


# This function was used in some specific checks until 1.6. Don't add it to
# the future check API. It is kept here for compatibility reasons for now.
def tags_of_host(hostname):
    return _config.get_config_cache().get_host_config(hostname).tags


nagios_illegal_chars = _config.nagios_illegal_chars
is_ipv6_primary = _config.is_ipv6_primary
is_cmc = _config.is_cmc

get_age_human_readable = lambda secs: str(render.Age(secs))
get_bytes_human_readable = render.fmt_bytes
quote_shell_string = _utils.quote_shell_string


def get_checkgroup_parameters(group, deflt=None):
    return _config.checkgroup_parameters.get(group, deflt)


# TODO: Replace by some render.* function / move to render module?
def get_filesize_human_readable(size):
    """Format size of a file for humans.

    Similar to get_bytes_human_readable, but optimized for file
    sizes. Really only use this for files. We assume that for smaller
    files one wants to compare the exact bytes of a file, so the
    threshold to show the value as MB/GB is higher than the one of
    get_bytes_human_readable()."""
    if size < 4 * 1024 * 1024:
        return "%d B" % int(size)
    elif size < 4 * 1024 * 1024 * 1024:
        return "%.2f MB" % (float(size) / (1024 * 1024))
    return "%.2f GB" % (float(size) / (1024 * 1024 * 1024))


# TODO: Replace by some render.* function / move to render module?
def get_nic_speed_human_readable(speed):
    """Format network speed (bit/s) for humans."""
    try:
        speedi = int(speed)
        if speedi == 10000000:
            speed = "10 Mbit/s"
        elif speedi == 100000000:
            speed = "100 Mbit/s"
        elif speedi == 1000000000:
            speed = "1 Gbit/s"
        elif speedi < 1500:
            speed = "%d bit/s" % speedi
        elif speedi < 1000000:
            speed = "%.1f Kbit/s" % (speedi / 1000.0)
        elif speedi < 1000000000:
            speed = "%.2f Mbit/s" % (speedi / 1000000.0)
        else:
            speed = "%.2f Gbit/s" % (speedi / 1000000000.0)
    except:
        pass
    return speed
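
# Illustrative results of the branches above:
#     get_nic_speed_human_readable(100000000) -> "100 Mbit/s"   (exact match)
#     get_nic_speed_human_readable(1536000)   -> "1.54 Mbit/s"
#     get_nic_speed_human_readable("n/a")     -> "n/a"          (unparsable input is returned unchanged)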


# TODO: Replace by some render.* function / move to render module?
def get_timestamp_human_readable(timestamp):
    """Format a time stamp for humans in "%Y-%m-%d %H:%M:%S" format.
    In case None is given or timestamp is 0, it returns "never"."""
    if timestamp:
        return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timestamp)))
    return "never"


# TODO: Replace by some render.* function / move to render module?
def get_relative_date_human_readable(timestamp):
    """Formats the given timestamp for humans "in ..." for future times
    or "... ago" for past timestamps."""
    now = time.time()
    if timestamp > now:
        return "in " + get_age_human_readable(timestamp - now)
    return get_age_human_readable(now - timestamp) + " ago"


# TODO: Replace by some render.* function / move to render module?
def get_percent_human_readable(perc, precision=2):
    """Format perc (0 <= perc <= 100 + x) so that precision
    digits are being displayed. This avoids a "0.00%" for
    very small numbers."""
    # NOTE: The "precision" argument is currently not evaluated; the formats
    # below use a fixed precision.
    if abs(perc) > 4:
        return "%.1f%%" % perc
    return "%.3g%%" % perc


# Counter handling

set_item_state = _item_state.set_item_state
get_item_state = _item_state.get_item_state
get_all_item_states = _item_state.get_all_item_states
clear_item_state = _item_state.clear_item_state
clear_item_states_by_full_keys = _item_state.clear_item_states_by_full_keys
get_rate = _item_state.get_rate
get_average = _item_state.get_average
# TODO: Cleanup checks and deprecate this
last_counter_wrap = _item_state.last_counter_wrap

SKIP = _item_state.SKIP
RAISE = _item_state.RAISE
ZERO = _item_state.ZERO

MKCounterWrapped = _item_state.MKCounterWrapped


def _normalize_bounds(levels):
    if len(levels) == 2:  # upper warn and crit
        warn_upper, crit_upper = levels[0], levels[1]
        warn_lower, crit_lower = None, None
    else:  # upper and lower warn and crit
        warn_upper, crit_upper = levels[0], levels[1]
        warn_lower, crit_lower = levels[2], levels[3]

    return warn_upper, crit_upper, warn_lower, crit_lower


def _check_boundaries(value, levels, human_readable_func, unit_info):
    warn_upper, crit_upper, warn_lower, crit_lower = _normalize_bounds(levels)
    # Critical cases
    if crit_upper is not None and value >= crit_upper:
        return 2, _levelsinfo_ty("at", warn_upper, crit_upper, human_readable_func, unit_info)
    if crit_lower is not None and value < crit_lower:
        return 2, _levelsinfo_ty("below", warn_lower, crit_lower, human_readable_func, unit_info)

    # Warning cases
    if warn_upper is not None and value >= warn_upper:
        return 1, _levelsinfo_ty("at", warn_upper, crit_upper, human_readable_func, unit_info)
    if warn_lower is not None and value < warn_lower:
        return 1, _levelsinfo_ty("below", warn_lower, crit_lower, human_readable_func, unit_info)
    return 0, ""


def _levelsinfo_ty(ty, warn, crit, human_readable_func, unit_info):
    return " (warn/crit {0} {1}{3}/{2}{3})".format(ty, human_readable_func(warn),
                                                   human_readable_func(crit), unit_info)


def check_levels(value,
                 dsname,
                 params,
                 unit="",
                 factor=1.0,
                 scale=1.0,
                 statemarkers=False,
                 human_readable_func=None,
                 infoname=None):
    """Generic function for checking a value against levels

    This also supports predictive levels.

    value:   currently measured value
    dsname:  name of the datasource in the RRD that corresponds to this value
             or None in order to skip perfdata
    params:  None or Tuple(None, None) -> no level checking.
             Tuple variants with non-None values:
             Tuple[warn_upper, crit_upper] -> upper level checking only.
             Tuple[warn_upper, crit_upper, warn_lower, crit_lower]
                 -> upper and lower level checking.
             If a Dict is passed to check_levels, predictive levels are used
             automatically. The following constellations are possible:
             Dict containing "lower" as key -> lower level checking.
             Dict containing "upper" or "levels_upper_min" as key -> upper level checking.
             Dict containing "lower" and "upper"/"levels_upper_min" as key ->
                 lower and upper level checking.
    unit:    unit to be displayed in the plugin output.
             Be aware: if a (builtin) human_readable_func is stated which already
             provides a unit info, then this unit is not necessary. An additional
             unit info is useful if a rate is calculated, e.g.

                 unit="/s",
                 human_readable_func=get_bytes_human_readable,

             results in 'X B/s'.
    factor:  the levels are multiplied with this factor before applying
             them to the value. This is being used for the CPU load check
             currently. The levels here are "per CPU", so the number of
             CPUs is used as factor.
    scale:   Scale of the levels in relation to "value" and the value in the RRDs.
             For example if the levels are specified in GB and the RRD stores KB,
             then the scale is 1024*1024.
    human_readable_func: Single argument function to present the value in a
             human readable fashion. Builtin human_readable functions already
             provide a unit:
             - get_percent_human_readable
             - get_age_human_readable
             - get_bytes_human_readable
             - get_filesize_human_readable
             - get_nic_speed_human_readable
             - get_timestamp_human_readable
             - get_relative_date_human_readable
    infoname: A label (like a title) that is prepended to the value in the info text.
    """
    unit_info = ""
    if unit.startswith('/'):
        unit_info = unit
    elif unit:
        unit_info = " %s" % unit

    if human_readable_func is None:
        human_readable_func = lambda x: "%.2f" % (x / scale)

    def scale_value(v):
        if v is None:
            return None
        return v * factor * scale

    if infoname:
        infotext = "%s: %s%s" % (infoname, human_readable_func(value), unit_info)
    else:
        infotext = "%s%s" % (human_readable_func(value), unit_info)

    # None, {} or (None, None) -> do not check any levels
    if not params or params == (None, None):
        if dsname:
            return 0, infotext, [(dsname, value)]
        return 0, infotext, []

    # Pair of numbers -> static levels
    elif isinstance(params, tuple):
        levels = map(scale_value, _normalize_bounds(params))
        ref_value = None

    # Dictionary -> predictive levels
    else:
        try:
            ref_value, levels = \
                _prediction.get_levels(host_name(), service_description(),
                                       dsname, params, "MAX", levels_factor=factor * scale)

            if ref_value:
                predictive_levels_msg = "predicted reference: %s" % human_readable_func(ref_value)
            else:
                predictive_levels_msg = "no reference for prediction yet"

        except MKGeneralException as e:
            ref_value = None
            levels = [None, None, None, None]
            predictive_levels_msg = "no reference for prediction (%s)" % e

        except Exception as e:
            if _debug.enabled():
                raise
            return 3, "%s" % e, []

        if predictive_levels_msg:
            infotext += " (%s)" % predictive_levels_msg

    state, levelstext = _check_boundaries(value, levels, human_readable_func, unit_info)
    infotext += levelstext
    if statemarkers:
        infotext += state_markers[state]

    if dsname:
        perfdata = [(dsname, value, levels[0], levels[1])]
        if ref_value:
            perfdata.append(('predict_' + dsname, ref_value))
    else:
        perfdata = []

    return state, infotext, perfdata
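
# A minimal usage sketch with static levels (the tuple form described in the
# docstring); "util" and "Utilization" are just placeholder names:
#     check_levels(92.0, "util", (80.0, 90.0), unit="%", infoname="Utilization")
#     -> (2, "Utilization: 92.00 % (warn/crit at 80.00 %/90.00 %)",
#         [("util", 92.0, 80.0, 90.0)])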


def get_effective_service_level():
    """Get the service level that applies to the current service.
    This can only be used within check functions, not during discovery nor parsing."""
    service_levels = _config.get_config_cache().service_extra_conf(
        host_name(), service_description(), _config.service_service_levels)

    if service_levels:
        return service_levels[0]
    else:
        service_levels = _config.host_extra_conf(host_name(), _config.host_service_levels)
        if service_levels:
            return service_levels[0]
    return 0


def utc_mktime(time_struct):
    """Works like time.mktime() but assumes the time_struct to be in UTC,
    not in local time."""
    import calendar
    return calendar.timegm(time_struct)
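
# For example, independent of the local timezone:
#     utc_mktime(time.strptime("2019-01-01 00:00:00", "%Y-%m-%d %H:%M:%S")) -> 1546300800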


def passwordstore_get_cmdline(fmt, pw):
    """Use this to prepare a command line argument for using a password from the
    Check_MK password store or an explicitly configured password."""
    if not isinstance(pw, tuple):
        pw = ("password", pw)

    if pw[0] == "password":
        return fmt % pw[1]

    return ("store", pw[1], fmt)


def get_http_proxy(http_proxy):
    """Returns the proxy URL to be used for HTTP requests

    Pass a value configured by the user using the HTTPProxyReference valuespec to this function
    and you will get back either a proxy URL, an empty string to enforce no proxy usage or None
    to use the proxy configuration from the process environment."""
    return _config.get_http_proxy(http_proxy)


def get_agent_data_time():
    """Use this function to get the age of the agent data cache file
    of tcp or snmp hosts or None in case of piggyback data because
    we do not exactly know the latest agent data. Maybe one time
    we can handle this. For cluster hosts an exception is raised."""
    return _agent_cache_file_age(host_name(), check_type())


def _agent_cache_file_age(hostname, check_plugin_name):
    if _config.is_cluster(hostname):
        raise MKGeneralException("get_agent_data_time() not valid for cluster")

    import cmk_base.check_utils
    if cmk_base.check_utils.is_snmp_check(check_plugin_name):
        cachefile = _paths.tcp_cache_dir + "/" + hostname + "." + check_plugin_name.split(".")[0]
    elif cmk_base.check_utils.is_tcp_check(check_plugin_name):
        cachefile = _paths.tcp_cache_dir + "/" + hostname
    else:
        cachefile = None

    if cachefile is not None and os.path.exists(cachefile):
        return _utils.cachefile_age(cachefile)

    return None


def get_parsed_item_data(check_function):
    """Use this decorator to determine the parsed item data outside
    of the respective check function.

    The check function can hence be defined as follows:

        @get_parsed_item_data
        def check_<check_name>(item, params, data):

    In case parsed is not a dict, the decorator returns 3
    (UNKN state) with a wrong usage message.
    In case item is missing as a key in parsed, or parsed[item]
    is empty, the decorator returns None, leading to
    cmk_base returning 3 (UNKN state) with an item not found message
    (see cmk_base/checking.py).
    """
    @functools.wraps(check_function)
    def wrapped_check_function(item, params, parsed):
        if not isinstance(parsed, dict):
            return 3, "Wrong usage of decorator function 'get_parsed_item_data': parsed is not a dict"
        if item not in parsed or not parsed[item]:
            return
        return check_function(item, params, parsed[item])

    return wrapped_check_function
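
# Illustrative use of the decorator; "check_mychk" and its parse result are made up:
#     parsed = {"item1": {"state": "running"}}
#
#     @get_parsed_item_data
#     def check_mychk(item, params, data):
#         # "data" already is parsed["item1"] here, not the whole parsed dict
#         return 0, "State: %s" % data["state"]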


def discover_single(info):
    # type: (Union[List, Dict]) -> Optional[List]
    """Return a discovered item in case there is info text or parsed"""
    if info:
        return [(None, {})]
    return None
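
# For example:
#     discover_single([["some", "agent", "output"]]) -> [(None, {})]
#     discover_single({})                            -> None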


def validate_filter(filter_function):
    # type: (Callable) -> Callable
    """Validate function argument is a callable and return it"""
    if callable(filter_function):
        return filter_function
    elif filter_function is not None:
        raise ValueError("Filtering function is not a callable,"
                         " a {} has been given.".format(type(filter_function)))
    return lambda *entry: entry[0]


def discover(selector=None, default_params=None):
    # type: (Callable, Union[dict, str]) -> Callable
    """Helper function to assist with service discoveries

    The discovery function is in many cases just a boilerplate function to
    recognize services listed in your parsed dictionary or the info
    list. It in general looks like

        def inventory_check(parsed):
            for key, value in parsed.items():
                if some_condition_based_on(key, value):
                    yield key, parameters

    The idea of this helper is to allow you to only worry about the logic
    function that decides if an entry is a service to be discovered or not.

    Keyword Arguments:
    selector       -- Filtering function (default lambda entry: entry[0])
                      Default: Uses the key or first item of info variable
    default_params -- Default parameters for discovered items (default {})

    Possible uses:

        If your discovery function recognizes every entry of your parsed
        dictionary or every row of the info list as a service, then you
        just need to call discover().

            check_info["chk"] = {'inventory_function': discover()}

        In case you want to have a simple filter function when dealing with
        the info list, you can directly give a lambda function. If this
        function returns a Boolean the first item of every entry is taken
        as the service name, if the function returns a string that will be
        taken as the service name. For this example we discover as services
        entries where item3 is positive and name the service according to
        item2.

            check_info["chk"] = {'inventory_function': discover(selector=lambda line: line[2] if line[3]>0 else False)}

        In case you have a more complicated selector condition and also
        want to include default parameters you may use a decorator.

        Please note that this discovery function does not work with the
        whole parsed/info data but only implements the logic for selecting
        each individual entry as a service.

        In the next example, we will process each entry of the parsed data
        dictionary. The service name is the capitalized key when the
        corresponding value has certain keywords.

            @discover(default_params="the_check_default_levels")
            def inventory_thecheck(key, value):
                required_entries = ["used", "ready", "total", "uptime"]
                if all(data in value for data in required_entries):
                    return key.upper()

            check_info["chk"] = {'inventory_function': inventory_thecheck}
    """
    def roller(parsed):
        if isinstance(parsed, dict):
            return parsed.iteritems()
        elif isinstance(parsed, (list, tuple)):
            return parsed
        raise ValueError("Discovery function only works with dictionaries,"
                         " lists and tuples; a {} has been given.".format(type(parsed)))

    def _discovery(filter_function):
        # type: (Callable) -> Callable
        @functools.wraps(filter_function)
        def discoverer(parsed):
            # type: (Union[dict, list]) -> Iterable[Tuple]
            params = default_params if isinstance(default_params,
                                                  six.string_types + (dict,)) else {}
            filterer = validate_filter(filter_function)
            from_dict = isinstance(parsed, dict)

            for entry in roller(parsed):
                if from_dict:
                    key, value = entry
                    name = filterer(key, value)
                else:
                    name = filterer(entry)

                if isinstance(name, six.string_types):
                    yield (name, params)
                elif name is True and from_dict:
                    yield (key, params)
                elif name is True and not from_dict:
                    yield (entry[0], params)
                elif name and hasattr(name, '__iter__'):
                    for new_name in name:
                        yield (new_name, params)

        return discoverer

    if callable(selector):
        return _discovery(selector)

    if selector is None and default_params is None:
        return _discovery(lambda *args: args[0])

    return _discovery
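
# Concrete behaviour of the default discovery (no selector, no default_params);
# the parsed dict below is made up and the output order follows dict iteration order:
#     parsed = {"foo": {"used": 1}, "bar": {"used": 2}}
#     list(discover()(parsed)) -> [("foo", {}), ("bar", {})]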


# NOTE: Currently this is not really needed, it is just here to keep any star
# import in sync with our intended API.
# TODO: Do we really need this? Is there code which uses a star import for this
# module?
__all__ = get_check_api_context().keys()