#!/usr/bin/env python
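#
# Summarize the crawler statistics recorded in var/crawl.stats: print the
# overall totals, then the ten slowest and the ten largest pages.
#
# The stats file is expected to hold the repr() of a dict mapping URLs to
# entry dicts with the keys orig_url, referer_url, last_duration,
# avg_duration, num_visited, last_content_size and avg_content_size; the
# special key "_TOTAL_" carries the aggregated totals. Run the script from
# the directory that contains var/crawl.stats.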

stats = eval(open("var/crawl.stats").read())
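

# Print one formatted block describing a single crawled page.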
def print_url(title, entry):
    print
    print "%s:" % title
    print "-" * 10
    print "URL: %s" % entry["orig_url"]
    print "Referer: %s" % entry["referer_url"]
    print "Duration: %0.2f sec (Avg. %0.2f sec of %d tries)" % \
        (entry["last_duration"], entry["avg_duration"],
         entry["num_visited"])
    print "Size: %0.2f KBytes (Avg. %0.2f KBytes of %d tries)" % \
        (entry["last_content_size"] / 1024.0,
         entry["avg_content_size"] / 1024.0,
         entry["num_visited"])
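

# The aggregated totals live under the special "_TOTAL_" key.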
print "Total:", stats["_TOTAL_"]

print ""
print "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
print "+ SLOWEST PAGES +"
print "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
print ""
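
# Rank the pages by average response time; entries that have never been
# timed sort last via the 0 default. The "_TOTAL_" pseudo entry is
# filtered out before taking the top ten, so it cannot use up a slot.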
slowest = sorted((entry for key, entry in stats.items() if key != "_TOTAL_"),
                 key=lambda entry: entry.get("avg_duration", 0),
                 reverse=True)[:10]
for num, entry in enumerate(slowest):
    print_url("%02d Slowest" % (num + 1), entry)

print ""
print "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
print "+ LARGEST PAGES +"
print "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
print ""
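
# Same ranking, but by average content size.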
largest = sorted((entry for key, entry in stats.items() if key != "_TOTAL_"),
                 key=lambda entry: entry.get("avg_content_size", 0),
                 reverse=True)[:10]
for num, entry in enumerate(largest):
    print_url("%02d Largest" % (num + 1), entry)