2 import urllib2
, time
, threading
, sys
class UrlFetchSample(threading.Thread):
    """Worker thread that repeatedly fetches one URL and logs timing data."""

    def __init__(self, urlAsString, fileName, samplingFrequency=5):
        """Remember the probe target and output settings.

        urlAsString       -- URL to fetch on each sample
        fileName          -- base name of the summary log file
        samplingFrequency -- number of samples to take (default 5)
        """
        super(UrlFetchSample, self).__init__()
        self.samplingFrequency = samplingFrequency
        self.fileName = fileName
        self.urlAsString = urlAsString
# NOTE(review): excerpt of the sampling method body (presumably run()) --
# the `def` header, the `try:` opener, the lines that read the response
# (`data`, `data_time`), and the initialisation of `success`,
# `total_response_time` and `total_duration` are NOT visible in this chunk.
# Indentation below is reconstructed; confirm against the full file.
        # Summary log file for the whole probing session of this URL.
        _file = open(self.fileName, "w")
        print "File Name: ", self.fileName, "\n"
        _file.write("URL: "+self.urlAsString+"\n")
        print "Probing: "+self.urlAsString, "\n"
        # One timing sample per iteration; each sample writes its own
        # numbered per-iteration log file next to the summary file.
        for index in range(self.samplingFrequency):
            iter_file_name=self.fileName+"."+str(index)
            print "Iter File Name: ", iter_file_name, "\n"
            i_file = open(iter_file_name, "w")
            i_file.write("Executing sample: "+str(index)+"\n")
            print "Executing sample: " + self.fileName + " " + str(index), "\n"
            # Wall-clock timestamps bracket the request:
            # start -> first response -> data retrieval (not visible here).
            start_time=time.time()
            # NOTE(review): the `try:` that pairs with the except below is
            # outside the visible lines; presumably it opens here.
            f = urllib2.urlopen(urllib2.Request(self.urlAsString))
            response_time=time.time()
            # NOTE(review): `data` and `data_time` are assigned in lines not
            # shown (presumably data = f.read(); data_time = time.time()).
            i_file.write("Response time: " + str(response_time - start_time)+"\n")
            i_file.write("Date retrieval time: " + str(data_time - response_time)+"\n")
            i_file.write("Duration: " + str(data_time - start_time)+"\n")
            i_file.write(str(data))
            # Running totals for the averages written after the loop;
            # initialised in lines not visible in this chunk.
            total_response_time += (response_time - start_time)
            total_duration += (data_time - start_time)
            # A failed sample is reported but does not abort the remaining
            # iterations.
            except urllib2.HTTPError:
                print "Error! " + self.fileName + " " + str(index) + " " + self.urlAsString, "\n"
        # NOTE(review): `success` (count of completed samples) is maintained
        # in lines not visible here; this division raises ZeroDivisionError
        # if every sample failed -- confirm the full file guards against it.
        _file.write("Average Response Time: " + str(total_response_time/success) + "\n")
        _file.write("Average Duration: " + str(total_duration/success) + "\n")
# NOTE(review): excerpt of the script entry point -- the surrounding loop
# (presumably `for _idx, arg in enumerate(sys.argv[...]):`), the creation of
# the `fetchers` list, and the body of the final loop (presumably
# fetcher.start() / fetcher.join()) are not visible in this chunk.
# One worker per command-line URL, logging to html-<idx>.log, 3 samples each.
fetchers.append(UrlFetchSample(arg, "html-"+str(_idx)+".log", 3))
for fetcher in fetchers: