stats = eval(file("var/crawl.stats").read())
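# The layout of var/crawl.stats is not shown here; the shape sketched below
# is an assumption inferred from the keys this script reads: a repr()'d dict
# mapping each crawled URL to its statistics, plus a special "_TOTAL_" entry
# printed as an aggregate further down.
#
#     {
#         "http://example.com/page": {
#             "orig_url": "http://example.com/page",
#             "referer_url": "http://example.com/",
#             "last_duration": 0.42, "avg_duration": 0.38,
#             "last_content_size": 10240, "avg_content_size": 9800,
#             "tries": 3,
#         },
#         "_TOTAL_": ...,
#         ...
#     }
#
# If the file really is a repr()'d literal, ast.literal_eval() loads it
# without eval()'s arbitrary-code-execution risk:
#
#     import ast
#     stats = ast.literal_eval(open("var/crawl.stats").read())
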
def print_url(title, entry):
    # NOTE: the "tries" key name is an assumption; it supplies the count
    # behind "of %d tries" in the two averages below.
    print title
    print "URL: %s" % entry["orig_url"]
    print "Referer: %s" % entry["referer_url"]
    print "Duration: %0.2f sec (Avg. %0.2f sec of %d tries)" % \
        (entry["last_duration"], entry["avg_duration"], entry["tries"])
    print "Size: %0.2f KBytes (Avg. %0.2f KBytes of %d tries)" % \
        (entry["last_content_size"] / 1024.0,
         entry["avg_content_size"] / 1024.0, entry["tries"])

20 print "Total:", stats
["_TOTAL_"]
23 print "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
24 print "+ SLOWEST PAGES +"
25 print "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
# Rank pages by average fetch duration, slowest first; entries that lack
# an "avg_duration" key sort as 0.  The and/or chain is the pre-Python 2.5
# spelling of a conditional expression.
for num, (key, val) in enumerate(sorted(stats.items(),
        key=lambda x: "avg_duration" in x[1] and x[1]["avg_duration"] or 0,
        reverse=True)[:10]):
    print_url("%02d Slowest" % num, val)
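
# Assuming every value in stats is a dict, an equivalent and arguably
# clearer sort key would be:
#
#     key=lambda x: x[1].get("avg_duration", 0)
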
38 print "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
39 print "+ LARGEST PAGES +"
40 print "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
# Same ranking as above, but by average content size.
for num, (key, val) in enumerate(sorted(stats.items(),
        key=lambda x: "avg_content_size" in x[1] and x[1]["avg_content_size"] or 0,
        reverse=True)[:10]):
    print_url("%02d Largest" % num, val)