# Origin: check_mk.git / checks / aws_s3
# (blob 5b0adf6faf57f9be86cb6319f82fbcde17a522dd)
# Commit note: Refactoring: Changed all check parameters starting with an 'o'
# to the new rulespec registration.
#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# |             ____ _               _        __  __ _  __           |
# |            / ___| |__   ___  ___| | __   |  \/  | |/ /           |
# |           | |   | '_ \ / _ \/ __| |/ /   | |\/| | ' /            |
# |           | |___| | | |  __/ (__|   <    | |  | | . \            |
# |            \____|_| |_|\___|\___|_|\_\___|_|  |_|_|\_\           |
# |                                                                  |
# | Copyright Mathias Kettner 2018             mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software;  you can redistribute it and/or modify it
# under the  terms of the  GNU General Public License  as published by
# the Free Software Foundation in version 2.  check_mk is  distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY;  with-
# out even the implied warranty of  MERCHANTABILITY  or  FITNESS FOR A
# PARTICULAR PURPOSE. See the  GNU General Public License for more de-
# tails. You should have  received  a copy of the  GNU  General Public
# License along with GNU Make; see the file  COPYING.  If  not,  write
# to the Free Software Foundation, Inc., 51 Franklin St,  Fifth Floor,
# Boston, MA 02110-1301 USA.
def parse_aws_s3(info):
    """Group the AWS agent's S3 metric rows by bucket name.

    Each row produced by parse_aws (from aws.include) carries a 'Label'
    (the bucket name) and an 'Id' of the form '<metric>_<statistic>'.
    The result maps bucket name -> {metric: {statistic: first value}},
    with optional 'LocationConstraint' and 'Tagging' attributes copied
    through when the row provides them.
    """
    parsed = {}
    for row in parse_aws(info):
        id_parts = row['Id'].split("_")
        bucket = parsed.setdefault(row['Label'], {})

        # Optional per-bucket attributes; only present on some rows.
        for attr in ('LocationConstraint', 'Tagging'):
            if attr in row:
                bucket[attr] = row[attr]

        metric = bucket.setdefault(id_parts[0], {})
        try:
            # Keep only the first reported value per statistic.
            metric.setdefault(id_parts[1], row['Values'][0])
        except (IndexError, ValueError):
            # Id without a '_<statistic>' suffix or a row without values:
            # nothing usable to record for this row.
            pass
    return parsed
49 # .--S3 objects----------------------------------------------------------.
50 # | ____ _____ _ _ _ |
51 # | / ___|___ / ___ | |__ (_) ___ ___| |_ ___ |
52 # | \___ \ |_ \ / _ \| '_ \| |/ _ \/ __| __/ __| |
53 # | ___) |__) | | (_) | |_) | | __/ (__| |_\__ \ |
54 # | |____/____/ \___/|_.__// |\___|\___|\__|___/ |
55 # | |__/ |
56 # '----------------------------------------------------------------------'
# Maps the CloudWatch S3 'StorageType' dimension (lower-cased by the agent)
# to the short label shown in the check output.
# NOTE(review): the closing brace was lost in the upstream extraction of this
# file; restored here to keep the module syntactically valid.
aws_s3_storage_mapping = {
    "standardstorage": "Standard",
    "standardiastorage": "IA",
    "reducedredundancystorage": "Redundancy",
}
@get_parsed_item_data
def check_aws_s3_objects(item, params, metrics):
    """Check the size and object count of a single S3 bucket.

    Yields:
      - total bucket size with a per-storage-class breakdown (perfdata
        'bucket_size'),
      - total number of objects (perfdata 'num_objects'),
      - the bucket location, if reported,
      - the bucket's tags, if reported.
    All results are informational (state 0).
    """
    size_per_class = metrics['bucketsizebytes']
    breakdown = ", ".join([
        "%s: %s" % (aws_s3_storage_mapping[storage_class],
                    get_bytes_human_readable(size))
        for storage_class, size in size_per_class.iteritems()
    ])
    total_size = sum(size_per_class.values())
    yield (0,
           'Bucket size: %s (%s)' % (get_bytes_human_readable(total_size), breakdown),
           [('bucket_size', total_size)])

    object_count = sum(metrics['numberofobjects'].values())
    yield 0, 'Number of objects: %s' % int(object_count), [('num_objects', object_count)]

    location = metrics.get('LocationConstraint')
    if location:
        yield 0, 'Location: %s' % location

    # Tagging, when present, is a list of {'Key': ..., 'Value': ...} dicts.
    tag_texts = ["%s: %s" % (tag['Key'], tag['Value'])
                 for tag in metrics.get('Tagging', {})]
    if tag_texts:
        yield 0, '[Tags] %s' % ", ".join(tag_texts)
# Registration of the per-bucket S3 objects check.
# NOTE(review): the closing brace was lost in the upstream extraction of this
# file; restored here to keep the module syntactically valid.
check_info['aws_s3'] = {
    'parse_function': parse_aws_s3,
    'inventory_function': discover(),
    'check_function': check_aws_s3_objects,
    'service_description': 'AWS/S3 Objects %s',
    'has_perfdata': True,
    'includes': ['aws.include'],
}
101 # .--summary-------------------------------------------------------------.
102 # | |
103 # | ___ _ _ _ __ ___ _ __ ___ __ _ _ __ _ _ |
104 # | / __| | | | '_ ` _ \| '_ ` _ \ / _` | '__| | | | |
105 # | \__ \ |_| | | | | | | | | | | | (_| | | | |_| | |
106 # | |___/\__,_|_| |_| |_|_| |_| |_|\__,_|_| \__, | |
107 # | |___/ |
108 # '----------------------------------------------------------------------'
def check_aws_s3_summary(item, params, parsed):
    """Summarize all discovered S3 buckets.

    Yields the total size over all buckets and, when at least one bucket
    was seen, the name and size of the largest one. Both results are
    informational (state 0); 'item' and 'params' are unused.
    """
    total = 0
    biggest_name, biggest_size = None, 0
    for name, bucket in parsed.items():
        size = sum(bucket['bucketsizebytes'].values())
        total += size
        # '>=' means a later bucket wins ties, as in the original ordering.
        if size >= biggest_size:
            biggest_name, biggest_size = name, size

    yield 0, 'Total size: %s' % get_bytes_human_readable(total)

    if biggest_name:
        yield 0, 'Largest bucket: %s (%s)' % (
            biggest_name, get_bytes_human_readable(biggest_size))
128 check_info['aws_s3.summary'] = {
129 'inventory_function': discover_single,
130 'check_function': check_aws_s3_summary,
131 'service_description': 'AWS/S3 Summary',