#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# |             ____ _               _        __  __ _  __           |
# |            / ___| |__   ___  ___| | __   |  \/  | |/ /           |
# |           | |   | '_ \ / _ \/ __| |/ /   | |\/| | ' /            |
# |           | |___| | | |  __/ (__|   <    | |  | | . \            |
# |            \____|_| |_|\___|\___|_|\_\___|_|  |_|_|\_\           |
# |                                                                  |
# | Copyright Mathias Kettner 2018             mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# tails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
import json
import re


def _docker_get_bytes(string):
    '''get number of bytes from string

    e.g.
    "123GB (42%)" -> 123000000000
    "0 B" -> 0
    "2B" -> 2
    "23 kB" -> 23000
    '''
    # remove percent
    string = string.split('(')[0].strip()
    tmp = re.split('([a-zA-Z]+)', string)
    value_string = tmp[0].strip()
    unit_string = tmp[1].strip() if len(tmp) > 1 else 'B'
    try:
        factor = {
            'TB': 10**12,
            'GB': 10**9,
            'MB': 10**6,
            'KB': 10**3,
            'kB': 10**3,
            'B': 1,
            '': 1,
        }[unit_string]
        return int(float(value_string) * factor)
    except (ValueError, TypeError):
        return None


def _docker_trunc_id(hash_string):
    '''normalize to short ID

    Some docker commands use shortened, some long IDs:
    Convert long ones to short ones, e.g.
    "sha256:8b15606a9e3e430cb7ba739fde2fbb3734a19f8a59a825ffa877f9be49059817"
    to
    "8b15606a9e3e"
    '''
    long_id = hash_string.split(':', 1)[-1]
    return long_id[:12]


def _get_repo_tag(string):
    if ":" in string:
        return tuple(string.rsplit(":", 1))
    return string, "latest"
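
# Illustrative only (made-up image references, not part of the original file):
# the repository/tag split assumes "latest" when no tag is given, e.g.
#   _get_repo_tag("checkmk/check-mk-raw:1.5.0") == ("checkmk/check-mk-raw", "1.5.0")
#   _get_repo_tag("nginx") == ("nginx", "latest")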


def parse_docker_node_info(info):
    '''parse output of "docker info"'''
    if not info:
        return {}

    # parse legacy json output (versions 1.5.0 - 1.5.0p6)
    joined = " ".join(info[0])
    if joined.endswith("permission denied"):
        return {}
    try:
        # this may contain a certificate containing newlines.
        return json.loads(joined.replace("\n", "\\n"))
    except ValueError:
        pass

    parsed = {}
    prefix = ""
    for row in info:
        if not row:
            continue
        # remove '|', it was protecting leading whitespace
        row0 = row[0][1:]
        if not row0:
            continue
        # ignore missing keys / pad lines that are not of "key: value" type
        if len(row) == 1:
            row.append('')
        key = row0.strip()
        value = ':'.join(row[1:]).strip()
        # indented keys are prefixed by the last not indented key
        if len(row0) - len(key) == 0:
            parsed[key] = value
            prefix = key
        else:
            parsed[prefix + key] = value

    ## some modifications to match json output:
    for key in ("Images", "Containers", "ContainersRunning", "ContainersStopped",
                "ContainersPaused"):
        try:
            parsed[key] = int(parsed[key])
        except (KeyError, ValueError):
            pass
    # reconstruct labels (they were not in "k: v" format)
    parsed["Labels"] = []
    for k in sorted(parsed.keys()):  # pylint: disable=consider-iterating-dictionary
        if k.startswith("Labels") and k != "Labels":
            parsed["Labels"].append(k[6:] + parsed.pop(k))
    # reconstruct swarm info:
    if "Swarm" in parsed:
        swarm = {"LocalNodeState": parsed["Swarm"]}
        if "SwarmNodeID" in parsed:
            swarm["NodeID"] = parsed.pop("SwarmNodeID")
        if "SwarmManagers" in parsed:
            swarm["RemoteManagers"] = parsed.pop("SwarmManagers")
        parsed["Swarm"] = swarm

    if "Server Version" in parsed:
        parsed["ServerVersion"] = parsed.pop("Server Version")
    if "Registry" in parsed:
        parsed["IndexServerAddress"] = parsed.pop("Registry")

    return parsed
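
# Illustrative sketch of the plain-text fallback (made-up agent rows, not real
# output): rows arrive colon-separated, with a leading '|' protecting the
# indentation, e.g.
#   parse_docker_node_info([
#       ["|Containers", " 2"],
#       ["| Running", " 1"],
#       ["|Server Version", " 18.09.1"],
#   ])
# would return
#   {"Containers": 2, "ContainersRunning": 1, "Labels": [],
#    "ServerVersion": "18.09.1"}
# i.e. indented keys are prefixed with their parent key, counters become ints,
# and "Server Version" is renamed to match the json output.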


def _docker_parse_table(rows, fields, keys):
    '''docker provides us with space separated tables with fields containing spaces

    e.g.:

    TYPE                TOTAL               ACTIVE              SIZE                RECLAIMABLE
    Images              7                   6                   2.076 GB            936.9 MB (45%)
    Containers          22                  0                   2.298 GB            2.298 GB (100%)
    Local Volumes       5                   5                   304 B               0 B (0%)
    '''
    if not rows or not rows[0]:
        return []

    indices = []
    for field in fields:
        rex = regex(field + r'\ *')  # 'regex' is the cached re.compile provided by the check API
        m = rex.search(rows[0][0])
        if m is not None:
            start, end = m.start(), m.end()
            if end - start == len(field):
                end = None
            indices.append((start, end))
        else:
            indices.append((0, 0))

    table = []
    for row in rows[1:]:
        if not row:
            continue
        try:
            line = {k: row[0][i:j].strip() for k, (i, j) in zip(keys, indices)}
        except IndexError:
            continue
        table.append(line)

    return table
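
# Illustrative only (made-up call, not part of the original file): each data
# row is sliced at the column offsets found in the header row, so for the
# docstring example with
#   fields=('TYPE', 'TOTAL', ...), keys=('Type', 'TotalCount', ...)
# the first data row would come back as
#   {'Type': 'Images', 'TotalCount': '7', 'Active': '6',
#    'Size': '2.076 GB', 'Reclaimable': '936.9 MB (45%)'}
# (all values still strings; the callers convert them).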


def parse_docker_system_df(info):
    def int_or_zero(s):
        return int(s.strip() or 0)

    field_map = (
        ('TYPE', 'TOTAL', 'ACTIVE', 'SIZE', 'RECLAIMABLE'),
        ('Type', 'TotalCount', 'Active', 'Size', 'Reclaimable'),
        (str, int_or_zero, int_or_zero, _docker_get_bytes, _docker_get_bytes),
    )

    try:  # parse legacy json output: from 1.5.0 - 1.5.0p6
        table = [json.loads(",".join(row)) for row in info if row]
    except ValueError:
        table = _docker_parse_table(info, field_map[0], field_map[1])

    parsed = {}
    for line in table:
        for key, type_ in zip(field_map[1], field_map[2]):
            v = line.get(key)
            if v is not None:
                line[key] = type_(v)
        parsed[line.get("Type").lower()] = line

    return parsed
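
# Illustrative only (values from the docstring example above, not real output):
# the result is keyed by the lower-cased type, counters are converted to ints
# and sizes to bytes via _docker_get_bytes, e.g.
#   parsed["images"]["TotalCount"] == 7     # "7" -> int
#   parsed["images"]["Size"]                # "2.076 GB" -> bytes (int)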
207 def _get_json_list(info):
208 json_list = []
209 for row in info:
210 if not row:
211 continue
212 try:
213 json_list.append(json.loads(' '.join(row)))
214 except ValueError:
215 pass
216 # some buggy docker commands produce empty output
217 return [element for element in json_list if element]
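
# Illustrative only: each agent row is expected to hold one JSON document
# (rows arrive split into words, so they are re-joined with spaces); rows that
# do not parse and empty documents are silently dropped, e.g.
#   _get_json_list([['{"ID":', '"8b15606a9e3e"}'], ['not', 'json'], []])
#   == [{"ID": "8b15606a9e3e"}]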


def parse_docker_subsection_images(info):
    table = _get_json_list(info)

    parsed = {}
    for item in table:
        v = item.get("VirtualSize")
        if v is not None:
            item["VirtualSize"] = _docker_get_bytes(v)
        parsed[item.get("ID")] = item

    return parsed
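
# Illustrative only (made-up record): one image record per JSON line, keyed by
# its ID, with the human readable size converted to bytes, e.g.
#   {"8b15606a9e3e": {"ID": "8b15606a9e3e", "Repository": "nginx",
#                     "Tag": "latest", "VirtualSize": 109000000, ...}}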


def parse_docker_subsection_image_labels(info):
    table = _get_json_list(info)

    return {_docker_trunc_id(long_id): data for long_id, data in table if data is not None}


def parse_docker_subsection_image_inspect(info):
    try:
        table = json.loads(' '.join(' '.join(row) for row in info if row))
    except ValueError:
        return {}
    return {_docker_trunc_id(image["Id"]): image for image in table}
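
# Note (descriptive, not from the original file): unlike the per-line
# subsections above, "image_inspect" is one multi-line JSON array in
# "docker inspect" style, so all rows are joined before parsing and the
# result is keyed by the truncated 12-character ID.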


def parse_docker_subsection_containers(info):
    table = _get_json_list(info)

    parsed = {}
    for item in table:
        image_name = item.get("Image", "")
        item["Repository"], item["Tag"] = _get_repo_tag(image_name)
        parsed[item.get("ID")] = item

    return parsed
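
# Illustrative only (made-up record): container records are keyed by container
# ID, and the "Image" reference is split into repository and tag so it can be
# matched against the image list later, e.g.
#   {"f3abc12de456": {"ID": "f3abc12de456", "Image": "nginx:1.15",
#                     "Repository": "nginx", "Tag": "1.15", ...}}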


def _split_subsections(info):
    subname = ''
    subsections = {}
    for row in info:
        if not row:
            continue
        if row[0].startswith('[[[') and row[0].endswith(']]]'):
            subname = row[0].strip('[]')
            continue
        subsections.setdefault(subname, []).append(row)
    return subsections
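
# Illustrative only: rows between "[[[name]]]" markers are grouped under that
# name, e.g.
#   _split_subsections([['[[[images]]]'], ['{"ID":"8b15606a9e3e"}']])
#   == {'images': [['{"ID":"8b15606a9e3e"}']]}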


def parse_docker_messed_up_labels(string):
    '''yield key value pairs

    'string' is in the format "key1=value1,key2=value2,...", but there
    may be unescaped commas in the values.
    '''

    def toggle_key_value():
        for chunk in string.split('='):
            for item in chunk.rsplit(',', 1):
                yield item

    toggler = toggle_key_value()
    return dict(zip(toggler, toggler))
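
# Illustrative only (made-up label string): commas inside values survive, as
# long as the keys themselves contain no comma, e.g.
#   parse_docker_messed_up_labels("build=1,2,3,maintainer=foo")
#   == {"build": "1,2,3", "maintainer": "foo"}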


def parse_docker_node_images(info):
    subsections = _split_subsections(info)
    images = parse_docker_subsection_images(subsections.get("images", []))
    image_labels = parse_docker_subsection_image_labels(subsections.get("image_labels", []))
    image_inspect = parse_docker_subsection_image_inspect(subsections.get("image_inspect", []))
    containers = parse_docker_subsection_containers(subsections.get("containers", []))

    for image_id, pref_info in image_inspect.iteritems():
        image = images.setdefault(image_id, {})
        image["ID"] = image_id
        labels = pref_info.get("Config", {}).get("Labels")
        if labels is not None:
            image.setdefault("__labels__", {}).update(labels)
        image["CreatedAt"] = pref_info["Created"]
        image["VirtualSize"] = pref_info["VirtualSize"]
        repotags = pref_info.get("RepoTags")
        if repotags:
            image["Repository"], image["Tag"] = _get_repo_tag(repotags[-1])
        else:
            repo = pref_info.get("RepoDigest", "").split('@', 1)[0]
            image["Repository"], image["Tag"] = _get_repo_tag(repo)

    for image in images.itervalues():
        image["amount_containers"] = 0
        image.setdefault("__labels__", {})

    for image_id, labels in image_labels.iteritems():
        image = images.get(_docker_trunc_id(image_id))
        if image is not None and labels is not None:
            image["__labels__"].update(labels)

    mapping = {(i['Repository'], i['Tag']): i['ID'] for i in images.itervalues()}

    for cont in containers.itervalues():
        image_id = mapping.get((cont["Repository"], cont["Tag"]))
        image = images.get(image_id)
        if image is not None:
            image["amount_containers"] += 1

        labels = cont.get("Labels")
        if isinstance(labels, (str, unicode)):
            cont["Labels"] = parse_docker_messed_up_labels(labels)

    return {"images": images, "containers": containers}


def parse_docker_network_inspect(info):
    try:
        return json.loads(''.join(row[0] for row in info if row))
    except ValueError:
        return []
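
# Note (descriptive, not from the original file): the network inspect output is
# one JSON array spread over several lines, each held in row[0], so the rows
# are concatenated without separators before parsing; [] is returned for
# unparseable output.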