4 This program is free software; you can redistribute it and/or modify
5 it under the terms of the Revised BSD License.
7 This program is distributed in the hope that it will be useful,
8 but WITHOUT ANY WARRANTY; without even the implied warranty of
9 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 Revised BSD License for more details.
12 Copyright 2016-2023 Game Maker 2k - https://github.com/GameMaker2k
13 Copyright 2016-2023 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
15 $FileInfo: pywwwget.py - Last Update: 9/10/2023 Ver. 0.5.4 RC 1 - Author: cooldude2k $
18 from __future__
import division
, absolute_import
, print_function
;
19 import re
, os
, sys
, hashlib
, shutil
, platform
, tempfile
, urllib
, gzip
, time
, argparse
, cgi
, subprocess
;
20 import logging
as log
;
27 havemechanize
= False;
32 havemechanize
= False;
33 if(sys
.version
[0]=="2"):
35 from cStringIO
import StringIO
;
37 from StringIO
import StringIO
;
38 # From http://python-future.org/compatible_idioms.html
39 from urlparse
import urlparse
, urlunparse
, urlsplit
, urlunsplit
, urljoin
;
40 from urllib
import urlencode
;
41 from urllib2
import urlopen
, Request
, install_opener
, HTTPError
;
42 import urllib2
, urlparse
, cookielib
;
43 if(sys
.version
[0]>="3"):
44 from io
import StringIO
, BytesIO
;
45 # From http://python-future.org/compatible_idioms.html
46 from urllib
.parse
import urlparse
, urlunparse
, urlsplit
, urlunsplit
, urljoin
, urlencode
;
47 from urllib
.request
import urlopen
, Request
, install_opener
;
48 from urllib
.error
import HTTPError
;
49 import urllib
.request
as urllib2
;
50 import urllib
.parse
as urlparse
;
51 import http
.cookiejar
as cookielib
;
53 __program_name__
= "PyWWW-Get";
54 __program_alt_name__
= "PyWWWGet";
55 __program_small_name__
= "wwwget";
56 __project__
= __program_name__
;
57 __project_url__
= "https://github.com/GameMaker2k/PyWWW-Get";
58 __version_info__
= (0, 5, 4, "RC 1", 1);
59 __version_date_info__
= (2023, 9, 10, "RC 1", 1);
60 __version_date__
= str(__version_date_info__
[0])+"."+str(__version_date_info__
[1]).zfill(2)+"."+str(__version_date_info__
[2]).zfill(2);
61 __revision__
= __version_info__
[3];
62 __revision_id__
= "$Id$";
63 if(__version_info__
[4] is not None):
64 __version_date_plusrc__
= __version_date__
+"-"+str(__version_date_info__
[4]);
65 if(__version_info__
[4] is None):
66 __version_date_plusrc__
= __version_date__
;
67 if(__version_info__
[3] is not None):
68 __version__
= str(__version_info__
[0])+"."+str(__version_info__
[1])+"."+str(__version_info__
[2])+" "+str(__version_info__
[3]);
69 if(__version_info__
[3] is None):
70 __version__
= str(__version_info__
[0])+"."+str(__version_info__
[1])+"."+str(__version_info__
[2]);
72 tmpfileprefix
= "py"+str(sys
.version_info
[0])+__program_small_name__
+str(__version_info__
[0])+"-";
74 pytempdir
= tempfile
.gettempdir();
76 geturls_cj
= cookielib
.CookieJar();
77 geturls_ua_firefox_windows7
= "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:61.0) Gecko/20100101 Firefox/61.0";
78 geturls_ua_seamonkey_windows7
= "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:52.0) Gecko/20100101 Firefox/52.0 SeaMonkey/2.49.3";
79 geturls_ua_chrome_windows7
= "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36";
80 geturls_ua_chromium_windows7
= "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chromium/67.0.3396.99 Chrome/67.0.3396.99 Safari/537.36";
81 geturls_ua_palemoon_windows7
= "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:52.9) Gecko/20100101 Goanna/3.4 Firefox/52.9 PaleMoon/27.9.3";
82 geturls_ua_opera_windows7
= "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36 OPR/54.0.2952.54";
83 geturls_ua_vivaldi_windows7
= "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.183 Safari/537.36 Vivaldi/1.96.1147.52";
84 geturls_ua_internet_explorer_windows7
= "Mozilla/5.0 (Windows NT 6.1; Win64; x64; Trident/7.0; rv:11.0) like Gecko";
85 geturls_ua_microsoft_edge_windows7
= "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36 Edge/17.17134";
86 geturls_ua_pywwwget_python
= "Mozilla/5.0 (compatible; {proname}/{prover}; +{prourl})".format(proname
=__project__
, prover
=__version__
, prourl
=__project_url__
);
87 if(platform
.python_implementation()!=""):
88 geturls_ua_pywwwget_python_alt
= "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver
=platform
.system()+" "+platform
.release(), archtype
=platform
.machine(), prourl
=__project_url__
, pyimp
=platform
.python_implementation(), pyver
=platform
.python_version(), proname
=__project__
, prover
=__version__
);
89 if(platform
.python_implementation()==""):
90 geturls_ua_pywwwget_python_alt
= "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver
=platform
.system()+" "+platform
.release(), archtype
=platform
.machine(), prourl
=__project_url__
, pyimp
="Python", pyver
=platform
.python_version(), proname
=__project__
, prover
=__version__
);
91 geturls_ua_googlebot_google
= "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)";
92 geturls_ua_googlebot_google_old
= "Googlebot/2.1 (+http://www.google.com/bot.html)";
93 geturls_ua
= geturls_ua_firefox_windows7
;
94 geturls_headers_firefox_windows7
= {'Referer': "http://google.com/", 'User-Agent': geturls_ua_firefox_windows7
, 'Accept-Encoding': "gzip, deflate", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
95 geturls_headers_seamonkey_windows7
= {'Referer': "http://google.com/", 'User-Agent': geturls_ua_seamonkey_windows7
, 'Accept-Encoding': "gzip, deflate", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
96 geturls_headers_chrome_windows7
= {'Referer': "http://google.com/", 'User-Agent': geturls_ua_chrome_windows7
, 'Accept-Encoding': "gzip, deflate", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
97 geturls_headers_chromium_windows7
= {'Referer': "http://google.com/", 'User-Agent': geturls_ua_chromium_windows7
, 'Accept-Encoding': "gzip, deflate", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
98 geturls_headers_palemoon_windows7
= {'Referer': "http://google.com/", 'User-Agent': geturls_ua_palemoon_windows7
, 'Accept-Encoding': "gzip, deflate", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
99 geturls_headers_opera_windows7
= {'Referer': "http://google.com/", 'User-Agent': geturls_ua_opera_windows7
, 'Accept-Encoding': "gzip, deflate", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
100 geturls_headers_vivaldi_windows7
= {'Referer': "http://google.com/", 'User-Agent': geturls_ua_vivaldi_windows7
, 'Accept-Encoding': "gzip, deflate", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
101 geturls_headers_internet_explorer_windows7
= {'Referer': "http://google.com/", 'User-Agent': geturls_ua_internet_explorer_windows7
, 'Accept-Encoding': "gzip, deflate", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
102 geturls_headers_microsoft_edge_windows7
= {'Referer': "http://google.com/", 'User-Agent': geturls_ua_microsoft_edge_windows7
, 'Accept-Encoding': "gzip, deflate", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
103 geturls_headers_pywwwget_python
= {'Referer': "http://google.com/", 'User-Agent': geturls_ua_pywwwget_python
, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
104 geturls_headers_pywwwget_python_alt
= {'Referer': "http://google.com/", 'User-Agent': geturls_ua_pywwwget_python_alt
, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
105 geturls_headers_googlebot_google
= {'Referer': "http://google.com/", 'User-Agent': geturls_ua_googlebot_google
, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
106 geturls_headers_googlebot_google_old
= {'Referer': "http://google.com/", 'User-Agent': geturls_ua_googlebot_google_old
, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
107 geturls_headers
= geturls_headers_firefox_windows7
;
108 geturls_download_sleep
= 0;
110 def verbose_printout(dbgtxt
, outtype
="log", dbgenable
=True, dgblevel
=20):
111 if(outtype
=="print" and dbgenable
):
114 elif(outtype
=="log" and dbgenable
):
115 logging
.info(dbgtxt
);
117 elif(outtype
=="warning" and dbgenable
):
118 logging
.warning(dbgtxt
);
120 elif(outtype
=="error" and dbgenable
):
121 logging
.error(dbgtxt
);
123 elif(outtype
=="critical" and dbgenable
):
124 logging
.critical(dbgtxt
);
126 elif(outtype
=="exception" and dbgenable
):
127 logging
.exception(dbgtxt
);
129 elif(outtype
=="logalt" and dbgenable
):
130 logging
.log(dgblevel
, dbgtxt
);
132 elif(outtype
=="debug" and dbgenable
):
133 logging
.debug(dbgtxt
);
141 def verbose_printout_return(dbgtxt
, outtype
="log", dbgenable
=True, dgblevel
=20):
142 verbose_printout(dbgtxt
, outtype
, dbgenable
, dgblevel
);
145 def add_url_param(url
, **params
):
147 parts
= list(urlparse
.urlsplit(url
));
148 d
= dict(cgi
.parse_qsl(parts
[n
])); # use cgi.parse_qs for list values
150 parts
[n
]=urlencode(d
);
151 return urlparse
.urlunsplit(parts
);
153 os
.environ
["PATH"] = os
.environ
["PATH"] + os
.pathsep
+ os
.path
.dirname(os
.path
.realpath(__file__
)) + os
.pathsep
+ os
.getcwd();
154 def which_exec(execfile):
155 for path
in os
.environ
["PATH"].split(":"):
156 if os
.path
.exists(path
+ "/" + execfile):
157 return path
+ "/" + execfile;
159 def listize(varlist
):
167 newlistreg
.update({ilx
: varlist
[il
]});
168 newlistrev
.update({varlist
[il
]: ilx
});
171 newlistfull
= {1: newlistreg
, 2: newlistrev
, 'reg': newlistreg
, 'rev': newlistrev
};
174 def twolistize(varlist
):
184 newlistnamereg
.update({ilx
: varlist
[il
][0].strip()});
185 newlistnamerev
.update({varlist
[il
][0].strip(): ilx
});
186 newlistdescreg
.update({ilx
: varlist
[il
][1].strip()});
187 newlistdescrev
.update({varlist
[il
][1].strip(): ilx
});
190 newlistnametmp
= {1: newlistnamereg
, 2: newlistnamerev
, 'reg': newlistnamereg
, 'rev': newlistnamerev
};
191 newlistdesctmp
= {1: newlistdescreg
, 2: newlistdescrev
, 'reg': newlistdescreg
, 'rev': newlistdescrev
};
192 newlistfull
= {1: newlistnametmp
, 2: newlistdesctmp
, 'name': newlistnametmp
, 'desc': newlistdesctmp
}
195 def arglistize(proexec
, *varlist
):
199 newarglist
= [proexec
];
201 if varlist
[il
][0] is not None:
202 newarglist
.append(varlist
[il
][0]);
203 if varlist
[il
][1] is not None:
204 newarglist
.append(varlist
[il
][1]);
208 # hms_string by ArcGIS Python Recipes
209 # https://arcpy.wordpress.com/2012/04/20/146/
210 def hms_string(sec_elapsed
):
211 h
= int(sec_elapsed
/ (60 * 60));
212 m
= int((sec_elapsed
% (60 * 60)) / 60);
213 s
= sec_elapsed
% 60.0;
214 return "{}:{:>02}:{:>05.2f}".format(h
, m
, s
);
216 # get_readable_size by Lipis
217 # http://stackoverflow.com/posts/14998888/revisions
218 def get_readable_size(bytes
, precision
=1, unit
="IEC"):
220 if(unit
!="IEC" and unit
!="SI"):
223 units
= [" B"," KiB"," MiB"," GiB"," TiB"," PiB"," EiB"," ZiB"];
224 unitswos
= ["B","KiB","MiB","GiB","TiB","PiB","EiB","ZiB"];
227 units
= [" B"," kB"," MB"," GB"," TB"," PB"," EB"," ZB"];
228 unitswos
= ["B","kB","MB","GB","TB","PB","EB","ZB"];
233 if abs(bytes
) < unitsize
:
234 strformat
= "%3."+str(precision
)+"f%s";
235 pre_return_val
= (strformat
% (bytes
, unit
));
236 pre_return_val
= re
.sub(r
"([0]+) ([A-Za-z]+)", r
" \2", pre_return_val
);
237 pre_return_val
= re
.sub(r
"\. ([A-Za-z]+)", r
" \1", pre_return_val
);
238 alt_return_val
= pre_return_val
.split();
239 return_val
= {'Bytes': orgbytes
, 'ReadableWithSuffix': pre_return_val
, 'ReadableWithoutSuffix': alt_return_val
[0], 'ReadableSuffix': alt_return_val
[1]}
242 strformat
= "%."+str(precision
)+"f%s";
243 pre_return_val
= (strformat
% (bytes
, "YiB"));
244 pre_return_val
= re
.sub(r
"([0]+) ([A-Za-z]+)", r
" \2", pre_return_val
);
245 pre_return_val
= re
.sub(r
"\. ([A-Za-z]+)", r
" \1", pre_return_val
);
246 alt_return_val
= pre_return_val
.split();
247 return_val
= {'Bytes': orgbytes
, 'ReadableWithSuffix': pre_return_val
, 'ReadableWithoutSuffix': alt_return_val
[0], 'ReadableSuffix': alt_return_val
[1]}
250 def get_readable_size_from_file(infile
, precision
=1, unit
="IEC", usehashes
=False, usehashtypes
="md5,sha1"):
252 usehashtypes
= usehashtypes
.lower();
253 getfilesize
= os
.path
.getsize(infile
);
254 return_val
= get_readable_size(getfilesize
, precision
, unit
);
256 hashtypelist
= usehashtypes
.split(",");
257 openfile
= open(infile
, "rb");
258 filecontents
= openfile
.read();
261 listnumend
= len(hashtypelist
);
262 while(listnumcount
< listnumend
):
263 hashtypelistlow
= hashtypelist
[listnumcount
].strip();
264 hashtypelistup
= hashtypelistlow
.upper();
265 filehash
= hashlib
.new(hashtypelistup
);
266 filehash
.update(filecontents
);
267 filegethash
= filehash
.hexdigest();
268 return_val
.update({hashtypelistup
: filegethash
});
272 def get_readable_size_from_string(instring
, precision
=1, unit
="IEC", usehashes
=False, usehashtypes
="md5,sha1"):
274 usehashtypes
= usehashtypes
.lower();
275 getfilesize
= len(instring
);
276 return_val
= get_readable_size(getfilesize
, precision
, unit
);
278 hashtypelist
= usehashtypes
.split(",");
280 listnumend
= len(hashtypelist
);
281 while(listnumcount
< listnumend
):
282 hashtypelistlow
= hashtypelist
[listnumcount
].strip();
283 hashtypelistup
= hashtypelistlow
.upper();
284 filehash
= hashlib
.new(hashtypelistup
);
285 if(sys
.version
[0]=="2"):
286 filehash
.update(instring
);
287 if(sys
.version
[0]>="3"):
288 filehash
.update(instring
.encode('utf-8'));
289 filegethash
= filehash
.hexdigest();
290 return_val
.update({hashtypelistup
: filegethash
});
294 def make_http_headers_from_dict_to_list(headers
={'Referer': "http://google.com/", 'User-Agent': geturls_ua
, 'Accept-Encoding': "gzip, deflate", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"}):
295 if isinstance(headers
, dict):
297 if(sys
.version
[0]=="2"):
298 for headkey
, headvalue
in headers
.iteritems():
299 returnval
.append((headkey
, headvalue
));
300 if(sys
.version
[0]>="3"):
301 for headkey
, headvalue
in headers
.items():
302 returnval
.append((headkey
, headvalue
));
303 elif isinstance(headers
, list):
309 def make_http_headers_from_dict_to_pycurl(headers
={'Referer': "http://google.com/", 'User-Agent': geturls_ua
, 'Accept-Encoding': "gzip, deflate", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"}):
310 if isinstance(headers
, dict):
312 if(sys
.version
[0]=="2"):
313 for headkey
, headvalue
in headers
.iteritems():
314 returnval
.append(headkey
+": "+headvalue
);
315 if(sys
.version
[0]>="3"):
316 for headkey
, headvalue
in headers
.items():
317 returnval
.append(headkey
+": "+headvalue
);
318 elif isinstance(headers
, list):
324 def make_http_headers_from_list_to_dict(headers
=[("Referer", "http://google.com/"), ("User-Agent", geturls_ua
), ("Accept-Encoding", "gzip, deflate"), ("Accept-Language", "en-US,en;q=0.8,en-CA,en-GB;q=0.6"), ("Accept-Charset", "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7"), ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"), ("Connection", "close")]):
325 if isinstance(headers
, list):
330 returnval
.update({headers
[mli
][0]: headers
[mli
][1]});
332 elif isinstance(headers
, dict):
338 def get_httplib_support(checkvalue
=None):
339 global haverequests
, havemechanize
;
341 returnval
.append("urllib");
342 returnval
.append("request");
344 returnval
.append("requests");
346 returnval
.append("mechanize");
347 if(not checkvalue
is None):
348 if(checkvalue
=="urllib1" or checkvalue
=="urllib2"):
349 checkvalue
= "urllib";
350 if(checkvalue
in returnval
):
356 def check_httplib_support(checkvalue
="urllib"):
357 if(checkvalue
=="urllib1" or checkvalue
=="urllib2"):
358 checkvalue
= "urllib";
359 returnval
= get_httplib_support(checkvalue
);
362 def get_httplib_support_list():
363 returnval
= get_httplib_support(None);
366 def download_from_url(httpurl
, httpheaders
, httpcookie
, httplibuse
="urllib", sleep
=-1):
367 global geturls_download_sleep
, haverequests
, havemechanize
;
369 sleep
= geturls_download_sleep
;
370 if(httplibuse
=="urllib1" or httplibuse
=="urllib2"):
371 httplibuse
= "urllib";
372 if(not haverequests
and httplibuse
=="requests"):
373 httplibuse
= "urllib";
374 if(not havemechanize
and httplibuse
=="mechanize"):
375 httplibuse
= "urllib";
376 if(httplibuse
=="urllib"):
377 returnval
= download_from_url_with_urllib(httpurl
, httpheaders
, httpcookie
, sleep
);
378 elif(httplibuse
=="request"):
379 returnval
= download_from_url_with_request(httpurl
, httpheaders
, httpcookie
, sleep
);
380 elif(httplibuse
=="requests"):
381 returnval
= download_from_url_with_requests(httpurl
, httpheaders
, httpcookie
, sleep
);
382 elif(httplibuse
=="mechanize"):
383 returnval
= download_from_url_with_mechanize(httpurl
, httpheaders
, httpcookie
, sleep
);
388 def download_from_url_file(httpurl
, httpheaders
, httpcookie
, httplibuse
="urllib", buffersize
=524288, sleep
=-1):
389 global geturls_download_sleep
, haverequests
, havemechanize
;
391 sleep
= geturls_download_sleep
;
392 if(httplibuse
=="urllib1" or httplibuse
=="urllib2"):
393 httplibuse
= "urllib";
394 if(not haverequests
and httplibuse
=="requests"):
395 httplibuse
= "urllib";
396 if(not havemechanize
and httplibuse
=="mechanize"):
397 httplibuse
= "urllib";
398 if(httplibuse
=="urllib"):
399 returnval
= download_from_url_file_with_urllib(httpurl
, httpheaders
, httpcookie
, buffersize
, sleep
);
400 elif(httplibuse
=="request"):
401 returnval
= download_from_url_file_with_request(httpurl
, httpheaders
, httpcookie
, buffersize
, sleep
);
402 elif(httplibuse
=="requests"):
403 returnval
= download_from_url_file_with_requests(httpurl
, httpheaders
, httpcookie
, buffersize
, sleep
);
404 elif(httplibuse
=="mechanize"):
405 returnval
= download_from_url_file_with_mechanize(httpurl
, httpheaders
, httpcookie
, buffersize
, sleep
);
410 def download_from_url_to_file(httpurl
, httpheaders
, httpcookie
, httplibuse
="urllib", outfile
="-", outpath
=os
.getcwd(), buffersize
=[524288, 524288], sleep
=-1):
411 global geturls_download_sleep
, haverequests
, havemechanize
;
413 sleep
= geturls_download_sleep
;
414 if(httplibuse
=="urllib1" or httplibuse
=="urllib2"):
415 httplibuse
= "urllib";
416 if(not haverequests
and httplibuse
=="requests"):
417 httplibuse
= "urllib";
418 if(not havemechanize
and httplibuse
=="mechanize"):
419 httplibuse
= "urllib";
420 if(httplibuse
=="urllib"):
421 returnval
= download_from_url_to_file_with_urllib(httpurl
, httpheaders
, httpcookie
, outfile
, outpath
, buffersize
, sleep
);
422 elif(httplibuse
=="request"):
423 returnval
= download_from_url_to_file_with_request(httpurl
, httpheaders
, httpcookie
, outfile
, outpath
, buffersize
, sleep
);
424 elif(httplibuse
=="requests"):
425 returnval
= download_from_url_to_file_with_requests(httpurl
, httpheaders
, httpcookie
, outfile
, outpath
, buffersize
, sleep
);
426 elif(httplibuse
=="mechanize"):
427 returnval
= download_from_url_to_file_with_mechanize(httpurl
, httpheaders
, httpcookie
, outfile
, outpath
, buffersize
, sleep
);
432 def download_from_url_with_urllib(httpurl
, httpheaders
, httpcookie
, sleep
=-1):
433 global geturls_download_sleep
;
435 sleep
= geturls_download_sleep
;
436 geturls_opener
= urllib2
.build_opener(urllib2
.HTTPCookieProcessor(httpcookie
));
437 if(isinstance(httpheaders
, dict)):
438 httpheaders
= make_http_headers_from_dict_to_list(httpheaders
);
439 geturls_opener
.addheaders
= httpheaders
;
441 geturls_text
= geturls_opener
.open(httpurl
);
442 log
.info("Downloading URL "+httpurl
);
443 if(geturls_text
.info().get("Content-Encoding")=="gzip" or geturls_text
.info().get("Content-Encoding")=="deflate"):
444 if(sys
.version
[0]=="2"):
445 strbuf
= StringIO(geturls_text
.read());
446 if(sys
.version
[0]>="3"):
447 strbuf
= BytesIO(geturls_text
.read());
448 gzstrbuf
= gzip
.GzipFile(fileobj
=strbuf
);
449 returnval_content
= gzstrbuf
.read()[:];
450 if(geturls_text
.info().get("Content-Encoding")!="gzip" and geturls_text
.info().get("Content-Encoding")!="deflate"):
451 returnval_content
= geturls_text
.read()[:];
452 returnval
= {'Type': "Content", 'Content': returnval_content
, 'Headers': dict(geturls_text
.info()), 'URL': geturls_text
.geturl(), 'Code': geturls_text
.getcode()};
453 geturls_text
.close();
456 def download_from_url_file_with_urllib(httpurl
, httpheaders
, httpcookie
, buffersize
=524288, sleep
=-1):
457 global geturls_download_sleep
, tmpfileprefix
, tmpfilesuffix
;
458 exec_time_start
= time
.time();
459 myhash
= hashlib
.new("sha1");
460 if(sys
.version
[0]=="2"):
461 myhash
.update(httpurl
);
462 myhash
.update(str(buffersize
));
463 myhash
.update(str(exec_time_start
));
464 if(sys
.version
[0]>="3"):
465 myhash
.update(httpurl
.encode('utf-8'));
466 myhash
.update(str(buffersize
).encode('utf-8'));
467 myhash
.update(str(exec_time_start
).encode('utf-8'));
468 newtmpfilesuffix
= tmpfilesuffix
+ str(myhash
.hexdigest());
470 sleep
= geturls_download_sleep
;
471 geturls_opener
= urllib2
.build_opener(urllib2
.HTTPCookieProcessor(httpcookie
));
472 if(isinstance(httpheaders
, dict)):
473 httpheaders
= make_http_headers_from_dict_to_list(httpheaders
);
474 geturls_opener
.addheaders
= httpheaders
;
476 geturls_text
= geturls_opener
.open(httpurl
);
477 downloadsize
= geturls_text
.info().get('Content-Length');
478 if(downloadsize
is not None):
479 downloadsize
= int(downloadsize
);
480 if downloadsize
is None: downloadsize
= 0;
483 log
.info("Downloading URL "+httpurl
);
484 with tempfile
.NamedTemporaryFile('wb+', prefix
=tmpfileprefix
, suffix
=newtmpfilesuffix
, delete
=False) as f
:
485 tmpfilename
= f
.name
;
486 returnval
= {'Type': "File", 'Filename': tmpfilename
, 'Filesize': downloadsize
, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize
, 2, "IEC"), 'SI': get_readable_size(downloadsize
, 2, "SI")}, 'Headers': dict(geturls_text
.info()), 'URL': geturls_text
.geturl(), 'Code': geturls_text
.getcode()};
488 databytes
= geturls_text
.read(buffersize
);
489 if not databytes
: break;
490 datasize
= len(databytes
);
491 fulldatasize
= datasize
+ fulldatasize
;
494 percentage
= str("{0:.2f}".format(float(float(fulldatasize
/ downloadsize
) * 100))).rstrip('0').rstrip('.')+"%";
495 downloaddiff
= fulldatasize
- prevdownsize
;
496 log
.info("Downloading "+get_readable_size(fulldatasize
, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize
, 2, "SI")['ReadableWithSuffix']+" "+str(percentage
)+" / Downloaded "+get_readable_size(downloaddiff
, 2, "IEC")['ReadableWithSuffix']);
497 prevdownsize
= fulldatasize
;
500 geturls_text
.close();
501 exec_time_end
= time
.time();
502 log
.info("It took "+hms_string(exec_time_start
- exec_time_end
)+" to download file.");
503 returnval
.update({'Filesize': os
.path
.getsize(tmpfilename
), 'DownloadTime': float(exec_time_start
- exec_time_end
), 'DownloadTimeReadable': hms_string(exec_time_start
- exec_time_end
)});
506 def download_from_url_to_file_with_urllib(httpurl
, httpheaders
, httpcookie
, outfile
="-", outpath
=os
.getcwd(), buffersize
=[524288, 524288], sleep
=-1):
507 global geturls_download_sleep
;
509 sleep
= geturls_download_sleep
;
510 if(not outfile
=="-"):
511 outpath
= outpath
.rstrip(os
.path
.sep
);
512 filepath
= os
.path
.realpath(outpath
+os
.path
.sep
+outfile
);
513 if(not os
.path
.exists(outpath
)):
514 os
.makedirs(outpath
);
515 if(os
.path
.exists(outpath
) and os
.path
.isfile(outpath
)):
517 if(os
.path
.exists(filepath
) and os
.path
.isdir(filepath
)):
519 pretmpfilename
= download_from_url_file_with_urllib(httpurl
, httpheaders
, httpcookie
, buffersize
[0], sleep
);
520 tmpfilename
= pretmpfilename
['Filename'];
521 downloadsize
= os
.path
.getsize(tmpfilename
);
523 log
.info("Moving file "+tmpfilename
+" to "+filepath
);
524 exec_time_start
= time
.time();
525 shutil
.move(tmpfilename
, filepath
);
526 exec_time_end
= time
.time();
527 log
.info("It took "+hms_string(exec_time_start
- exec_time_end
)+" to move file.");
528 if(os
.path
.exists(tmpfilename
)):
529 os
.remove(tmpfilename
);
530 returnval
= {'Type': "File", 'Filename': filepath
, 'Filesize': downloadsize
, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize
, 2, "IEC"), 'SI': get_readable_size(downloadsize
, 2, "SI")}, 'DownloadTime': pretmpfilename
['DownloadTime'], 'DownloadTimeReadable': pretmpfilename
['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start
- exec_time_end
), 'MoveFileTimeReadable': hms_string(exec_time_start
- exec_time_end
), 'Headers': pretmpfilename
['Headers'], 'URL': pretmpfilename
['URL'], 'Code': pretmpfilename
['Code']};
531 if(outfile
=="-" and sys
.version
[0]=="2"):
532 pretmpfilename
= download_from_url_file_with_urllib(httpurl
, httpheaders
, httpcookie
, buffersize
[0], sleep
);
533 tmpfilename
= pretmpfilename
['Filename'];
534 downloadsize
= os
.path
.getsize(tmpfilename
);
537 exec_time_start
= time
.time();
538 with
open(tmpfilename
, 'rb') as ft
:
541 databytes
= ft
.read(buffersize
[1]);
542 if not databytes
: break;
543 datasize
= len(databytes
);
544 fulldatasize
= datasize
+ fulldatasize
;
547 percentage
= str("{0:.2f}".format(float(float(fulldatasize
/ downloadsize
) * 100))).rstrip('0').rstrip('.')+"%";
548 downloaddiff
= fulldatasize
- prevdownsize
;
549 log
.info("Copying "+get_readable_size(fulldatasize
, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize
, 2, "SI")['ReadableWithSuffix']+" "+str(percentage
)+" / Copied "+get_readable_size(downloaddiff
, 2, "IEC")['ReadableWithSuffix']);
550 prevdownsize
= fulldatasize
;
553 fdata
= f
.getvalue();
556 os
.remove(tmpfilename
);
557 exec_time_end
= time
.time();
558 log
.info("It took "+hms_string(exec_time_start
- exec_time_end
)+" to copy file.");
559 returnval
= {'Type': "Content", 'Content': fdata
, 'Contentsize': downloadsize
, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize
, 2, "IEC"), 'SI': get_readable_size(downloadsize
, 2, "SI")}, 'DownloadTime': pretmpfilename
['DownloadTime'], 'DownloadTimeReadable': pretmpfilename
['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start
- exec_time_end
), 'MoveFileTimeReadable': hms_string(exec_time_start
- exec_time_end
), 'Headers': pretmpfilename
['Headers'], 'URL': pretmpfilename
['URL'], 'Code': pretmpfilename
['Code']};
560 if(outfile
=="-" and sys
.version
[0]>="3"):
561 pretmpfilename
= download_from_url_file_with_urllib(httpurl
, httpheaders
, httpcookie
, buffersize
[0], sleep
);
562 tmpfilename
= pretmpfilename
['Filename'];
563 downloadsize
= os
.path
.getsize(tmpfilename
);
566 exec_time_start
= time
.time();
567 with
open(tmpfilename
, 'rb') as ft
:
570 databytes
= ft
.read(buffersize
[1]);
571 if not databytes
: break;
572 datasize
= len(databytes
);
573 fulldatasize
= datasize
+ fulldatasize
;
576 percentage
= str("{0:.2f}".format(float(float(fulldatasize
/ downloadsize
) * 100))).rstrip('0').rstrip('.')+"%";
577 downloaddiff
= fulldatasize
- prevdownsize
;
578 log
.info("Copying "+get_readable_size(fulldatasize
, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize
, 2, "SI")['ReadableWithSuffix']+" "+str(percentage
)+" / Copied "+get_readable_size(downloaddiff
, 2, "IEC")['ReadableWithSuffix']);
579 prevdownsize
= fulldatasize
;
582 fdata
= f
.getvalue();
585 os
.remove(tmpfilename
);
586 exec_time_end
= time
.time();
587 log
.info("It took "+hms_string(exec_time_start
- exec_time_end
)+" to copy file.");
588 returnval
= {'Type': "Content", 'Content': fdata
, 'Contentsize': downloadsize
, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize
, 2, "IEC"), 'SI': get_readable_size(downloadsize
, 2, "SI")}, 'DownloadTime': pretmpfilename
['DownloadTime'], 'DownloadTimeReadable': pretmpfilename
['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start
- exec_time_end
), 'MoveFileTimeReadable': hms_string(exec_time_start
- exec_time_end
), 'Headers': pretmpfilename
['Headers'], 'URL': pretmpfilename
['URL'], 'Code': pretmpfilename
['Code']};
591 def download_from_url_with_request(httpurl
, httpheaders
, httpcookie
, sleep
=-1):
592 global geturls_download_sleep
;
594 sleep
= geturls_download_sleep
;
595 geturls_opener
= urllib2
.build_opener(urllib2
.HTTPCookieProcessor(httpcookie
));
596 if(isinstance(httpheaders
, dict)):
597 httpheaders
= make_http_headers_from_dict_to_list(httpheaders
);
598 geturls_opener
.addheaders
= httpheaders
;
599 urllib
.request
.install_opener(geturls_opener
);
601 httpheaders
= make_http_headers_from_list_to_dict(httpheaders
);
602 geturls_text
= urlopen(Request(httpurl
, headers
=httpheaders
));
603 log
.info("Downloading URL "+httpurl
);
604 if(geturls_text
.headers
.get("Content-Encoding")=="gzip" or geturls_text
.headers
.get("Content-Encoding")=="deflate"):
605 if(sys
.version
[0]=="2"):
606 strbuf
= StringIO(geturls_text
.read());
607 if(sys
.version
[0]>="3"):
608 strbuf
= BytesIO(geturls_text
.read());
609 gzstrbuf
= gzip
.GzipFile(fileobj
=strbuf
);
610 returnval_content
= gzstrbuf
.read()[:];
611 if(geturls_text
.headers
.get("Content-Encoding")!="gzip" and geturls_text
.headers
.get("Content-Encoding")!="deflate"):
612 returnval_content
= geturls_text
.read()[:];
613 returnval
= {'Type': "Content", 'Content': returnval_content
, 'Headers': dict(geturls_text
.headers
), 'URL': geturls_text
.geturl(), 'Code': geturls_text
.getcode()};
614 geturls_text
.close();
617 def download_from_url_file_with_request(httpurl
, httpheaders
, httpcookie
, buffersize
=524288, sleep
=-1):
618 global geturls_download_sleep
, tmpfileprefix
, tmpfilesuffix
;
619 exec_time_start
= time
.time();
620 myhash
= hashlib
.new("sha1");
621 if(sys
.version
[0]=="2"):
622 myhash
.update(httpurl
);
623 myhash
.update(str(buffersize
));
624 myhash
.update(str(exec_time_start
));
625 if(sys
.version
[0]>="3"):
626 myhash
.update(httpurl
.encode('utf-8'));
627 myhash
.update(str(buffersize
).encode('utf-8'));
628 myhash
.update(str(exec_time_start
).encode('utf-8'));
629 newtmpfilesuffix
= tmpfilesuffix
+ str(myhash
.hexdigest());
631 sleep
= geturls_download_sleep
;
632 geturls_opener
= urllib2
.build_opener(urllib2
.HTTPCookieProcessor(httpcookie
));
633 if(isinstance(httpheaders
, dict)):
634 httpheaders
= make_http_headers_from_dict_to_list(httpheaders
);
635 geturls_opener
.addheaders
= httpheaders
;
636 urllib
.request
.install_opener(geturls_opener
);
638 httpheaders
= make_http_headers_from_list_to_dict(httpheaders
);
639 geturls_text
= urlopen(Request(httpurl
, headers
=httpheaders
));
640 downloadsize
= geturls_text
.headers
.get('Content-Length');
641 if(downloadsize
is not None):
642 downloadsize
= int(downloadsize
);
643 if downloadsize
is None: downloadsize
= 0;
646 log
.info("Downloading URL "+httpurl
);
647 with tempfile
.NamedTemporaryFile('wb+', prefix
=tmpfileprefix
, suffix
=newtmpfilesuffix
, delete
=False) as f
:
648 tmpfilename
= f
.name
;
649 returnval
= {'Type': "File", 'Filename': tmpfilename
, 'Filesize': downloadsize
, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize
, 2, "IEC"), 'SI': get_readable_size(downloadsize
, 2, "SI")}, 'Headers': dict(geturls_text
.headers
), 'URL': geturls_text
.geturl(), 'Code': geturls_text
.getcode()};
651 databytes
= geturls_text
.read(buffersize
);
652 if not databytes
: break;
653 datasize
= len(databytes
);
654 fulldatasize
= datasize
+ fulldatasize
;
657 percentage
= str("{0:.2f}".format(float(float(fulldatasize
/ downloadsize
) * 100))).rstrip('0').rstrip('.')+"%";
658 downloaddiff
= fulldatasize
- prevdownsize
;
659 log
.info("Downloading "+get_readable_size(fulldatasize
, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize
, 2, "SI")['ReadableWithSuffix']+" "+str(percentage
)+" / Downloaded "+get_readable_size(downloaddiff
, 2, "IEC")['ReadableWithSuffix']);
660 prevdownsize
= fulldatasize
;
663 geturls_text
.close();
664 exec_time_end
= time
.time();
665 log
.info("It took "+hms_string(exec_time_start
- exec_time_end
)+" to download file.");
666 returnval
.update({'Filesize': os
.path
.getsize(tmpfilename
), 'DownloadTime': float(exec_time_start
- exec_time_end
), 'DownloadTimeReadable': hms_string(exec_time_start
- exec_time_end
)});
669 def download_from_url_to_file_with_request(httpurl
, httpheaders
, httpcookie
, outfile
="-", outpath
=os
.getcwd(), buffersize
=[524288, 524288], sleep
=-1):
670 global geturls_download_sleep
;
672 sleep
= geturls_download_sleep
;
673 if(not outfile
=="-"):
674 outpath
= outpath
.rstrip(os
.path
.sep
);
675 filepath
= os
.path
.realpath(outpath
+os
.path
.sep
+outfile
);
676 if(not os
.path
.exists(outpath
)):
677 os
.makedirs(outpath
);
678 if(os
.path
.exists(outpath
) and os
.path
.isfile(outpath
)):
680 if(os
.path
.exists(filepath
) and os
.path
.isdir(filepath
)):
682 pretmpfilename
= download_from_url_file_with_request(httpurl
, httpheaders
, httpcookie
, buffersize
[0], sleep
);
683 tmpfilename
= pretmpfilename
['Filename'];
684 downloadsize
= os
.path
.getsize(tmpfilename
);
686 log
.info("Moving file "+tmpfilename
+" to "+filepath
);
687 exec_time_start
= time
.time();
688 shutil
.move(tmpfilename
, filepath
);
689 exec_time_end
= time
.time();
690 log
.info("It took "+hms_string(exec_time_start
- exec_time_end
)+" to move file.");
691 if(os
.path
.exists(tmpfilename
)):
692 os
.remove(tmpfilename
);
693 returnval
= {'Type': "File", 'Filename': filepath
, 'Filesize': downloadsize
, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize
, 2, "IEC"), 'SI': get_readable_size(downloadsize
, 2, "SI")}, 'DownloadTime': pretmpfilename
['DownloadTime'], 'DownloadTimeReadable': pretmpfilename
['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start
- exec_time_end
), 'MoveFileTimeReadable': hms_string(exec_time_start
- exec_time_end
), 'Headers': pretmpfilename
['Headers'], 'URL': pretmpfilename
['URL'], 'Code': pretmpfilename
['Code']};
694 if(outfile
=="-" and sys
.version
[0]=="2"):
695 pretmpfilename
= download_from_url_file_with_request(httpurl
, httpheaders
, httpcookie
, buffersize
[0], sleep
);
696 tmpfilename
= pretmpfilename
['Filename'];
697 downloadsize
= os
.path
.getsize(tmpfilename
);
700 exec_time_start
= time
.time();
701 with
open(tmpfilename
, 'rb') as ft
:
704 databytes
= ft
.read(buffersize
[1]);
705 if not databytes
: break;
706 datasize
= len(databytes
);
707 fulldatasize
= datasize
+ fulldatasize
;
710 percentage
= str("{0:.2f}".format(float(float(fulldatasize
/ downloadsize
) * 100))).rstrip('0').rstrip('.')+"%";
711 downloaddiff
= fulldatasize
- prevdownsize
;
712 log
.info("Copying "+get_readable_size(fulldatasize
, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize
, 2, "SI")['ReadableWithSuffix']+" "+str(percentage
)+" / Copied "+get_readable_size(downloaddiff
, 2, "IEC")['ReadableWithSuffix']);
713 prevdownsize
= fulldatasize
;
716 fdata
= f
.getvalue();
719 os
.remove(tmpfilename
);
720 exec_time_end
= time
.time();
721 log
.info("It took "+hms_string(exec_time_start
- exec_time_end
)+" to copy file.");
722 returnval
= {'Type': "Content", 'Content': fdata
, 'Contentsize': downloadsize
, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize
, 2, "IEC"), 'SI': get_readable_size(downloadsize
, 2, "SI")}, 'DownloadTime': pretmpfilename
['DownloadTime'], 'DownloadTimeReadable': pretmpfilename
['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start
- exec_time_end
), 'MoveFileTimeReadable': hms_string(exec_time_start
- exec_time_end
), 'Headers': pretmpfilename
['Headers'], 'URL': pretmpfilename
['URL'], 'Code': pretmpfilename
['Code']};
723 if(outfile
=="-" and sys
.version
[0]>="3"):
724 pretmpfilename
= download_from_url_file_with_request(httpurl
, httpheaders
, httpcookie
, buffersize
[0], sleep
);
725 tmpfilename
= pretmpfilename
['Filename'];
726 downloadsize
= os
.path
.getsize(tmpfilename
);
729 exec_time_start
= time
.time();
730 with
open(tmpfilename
, 'rb') as ft
:
733 databytes
= ft
.read(buffersize
[1]);
734 if not databytes
: break;
735 datasize
= len(databytes
);
736 fulldatasize
= datasize
+ fulldatasize
;
739 percentage
= str("{0:.2f}".format(float(float(fulldatasize
/ downloadsize
) * 100))).rstrip('0').rstrip('.')+"%";
740 downloaddiff
= fulldatasize
- prevdownsize
;
741 log
.info("Copying "+get_readable_size(fulldatasize
, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize
, 2, "SI")['ReadableWithSuffix']+" "+str(percentage
)+" / Copied "+get_readable_size(downloaddiff
, 2, "IEC")['ReadableWithSuffix']);
742 prevdownsize
= fulldatasize
;
745 fdata
= f
.getvalue();
748 os
.remove(tmpfilename
);
749 exec_time_end
= time
.time();
750 log
.info("It took "+hms_string(exec_time_start
- exec_time_end
)+" to copy file.");
751 returnval
= {'Type': "Content", 'Content': fdata
, 'Contentsize': downloadsize
, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize
, 2, "IEC"), 'SI': get_readable_size(downloadsize
, 2, "SI")}, 'DownloadTime': pretmpfilename
['DownloadTime'], 'DownloadTimeReadable': pretmpfilename
['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start
- exec_time_end
), 'MoveFileTimeReadable': hms_string(exec_time_start
- exec_time_end
), 'Headers': pretmpfilename
['Headers'], 'URL': pretmpfilename
['URL'], 'Code': pretmpfilename
['Code']};
755 def download_from_url_with_requests(httpurl
, httpheaders
, httpcookie
, sleep
=-1):
756 global geturls_download_sleep
;
758 sleep
= geturls_download_sleep
;
759 if(isinstance(httpheaders
, list)):
760 httpheaders
= make_http_headers_from_list_to_dict(httpheaders
);
762 geturls_text
= requests
.get(httpurl
, headers
=httpheaders
, cookies
=httpcookie
);
763 log
.info("Downloading URL "+httpurl
);
764 if(geturls_text
.headers
.get('Content-Type')=="gzip" or geturls_text
.headers
.get('Content-Type')=="deflate"):
765 if(sys
.version
[0]=="2"):
766 strbuf
= StringIO(geturls_text
.content
);
767 if(sys
.version
[0]>="3"):
768 strbuf
= BytesIO(geturls_text
.content
);
769 gzstrbuf
= gzip
.GzipFile(fileobj
=strbuf
);
770 returnval_content
= gzstrbuf
.content
[:];
771 if(geturls_text
.headers
.get('Content-Type')!="gzip" and geturls_text
.headers
.get('Content-Type')!="deflate"):
772 returnval_content
= geturls_text
.content
[:];
773 returnval
= {'Type': "Content", 'Content': returnval_content
, 'Headers': dict(geturls_text
.headers
), 'URL': geturls_text
.url
, 'Code': geturls_text
.status_code
};
774 geturls_text
.close();
777 if(not haverequests
):
778 def download_from_url_with_requests(httpurl
, httpheaders
, httpcookie
, sleep
=-1):
779 returnval
= download_from_url_with_urllib(httpurl
, httpheaders
, httpcookie
, sleep
)
783 def download_from_url_file_with_requests(httpurl
, httpheaders
, httpcookie
, buffersize
=524288, sleep
=-1):
784 global geturls_download_sleep
, tmpfileprefix
, tmpfilesuffix
;
785 exec_time_start
= time
.time();
786 myhash
= hashlib
.new("sha1");
787 if(sys
.version
[0]=="2"):
788 myhash
.update(httpurl
);
789 myhash
.update(str(buffersize
));
790 myhash
.update(str(exec_time_start
));
791 if(sys
.version
[0]>="3"):
792 myhash
.update(httpurl
.encode('utf-8'));
793 myhash
.update(str(buffersize
).encode('utf-8'));
794 myhash
.update(str(exec_time_start
).encode('utf-8'));
795 newtmpfilesuffix
= tmpfilesuffix
+ str(myhash
.hexdigest());
797 sleep
= geturls_download_sleep
;
798 if(isinstance(httpheaders
, list)):
799 httpheaders
= make_http_headers_from_list_to_dict(httpheaders
);
801 geturls_text
= requests
.get(httpurl
, headers
=httpheaders
, cookies
=httpcookie
, stream
=True);
802 downloadsize
= int(geturls_text
.headers
.get('Content-Length'));
803 if(downloadsize
is not None):
804 downloadsize
= int(downloadsize
);
805 if downloadsize
is None: downloadsize
= 0;
808 log
.info("Downloading URL "+httpurl
);
809 with tempfile
.NamedTemporaryFile('wb+', prefix
=tmpfileprefix
, suffix
=newtmpfilesuffix
, delete
=False) as f
:
810 tmpfilename
= f
.name
;
811 returnval
= {'Type': "File", 'Filename': tmpfilename
, 'Filesize': downloadsize
, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize
, 2, "IEC"), 'SI': get_readable_size(downloadsize
, 2, "SI")}, 'Headers': dict(geturls_text
.headers
), 'URL': geturls_text
.url
, 'Code': geturls_text
.status_code
};
812 for databytes
in geturls_text
.iter_content(chunk_size
=buffersize
):
813 datasize
= len(databytes
);
814 fulldatasize
= datasize
+ fulldatasize
;
817 percentage
= str("{0:.2f}".format(float(float(fulldatasize
/ downloadsize
) * 100))).rstrip('0').rstrip('.')+"%";
818 downloaddiff
= fulldatasize
- prevdownsize
;
819 log
.info("Downloading "+get_readable_size(fulldatasize
, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize
, 2, "SI")['ReadableWithSuffix']+" "+str(percentage
)+" / Downloaded "+get_readable_size(downloaddiff
, 2, "IEC")['ReadableWithSuffix']);
820 prevdownsize
= fulldatasize
;
823 geturls_text
.close();
824 exec_time_end
= time
.time();
825 log
.info("It took "+hms_string(exec_time_start
- exec_time_end
)+" to download file.");
826 returnval
.update({'Filesize': os
.path
.getsize(tmpfilename
), 'DownloadTime': float(exec_time_start
- exec_time_end
), 'DownloadTimeReadable': hms_string(exec_time_start
- exec_time_end
)});
829 if(not haverequests
):
830 def download_from_url_file_with_requests(httpurl
, httpheaders
, httpcookie
, buffersize
=524288, sleep
=-1):
831 returnval
= download_from_url_file_with_urllib(httpurl
, httpheaders
, httpcookie
, buffersize
, sleep
)
835 def download_from_url_to_file_with_requests(httpurl
, httpheaders
, httpcookie
, outfile
="-", outpath
=os
.getcwd(), buffersize
=[524288, 524288], sleep
=-1):
836 global geturls_download_sleep
;
838 sleep
= geturls_download_sleep
;
839 if(not outfile
=="-"):
840 outpath
= outpath
.rstrip(os
.path
.sep
);
841 filepath
= os
.path
.realpath(outpath
+os
.path
.sep
+outfile
);
842 if(not os
.path
.exists(outpath
)):
843 os
.makedirs(outpath
);
844 if(os
.path
.exists(outpath
) and os
.path
.isfile(outpath
)):
846 if(os
.path
.exists(filepath
) and os
.path
.isdir(filepath
)):
848 pretmpfilename
= download_from_url_file_with_requests(httpurl
, httpheaders
, httpcookie
, buffersize
[0], sleep
);
849 tmpfilename
= pretmpfilename
['Filename'];
850 downloadsize
= os
.path
.getsize(tmpfilename
);
852 log
.info("Moving file "+tmpfilename
+" to "+filepath
);
853 exec_time_start
= time
.time();
854 shutil
.move(tmpfilename
, filepath
);
855 exec_time_end
= time
.time();
856 log
.info("It took "+hms_string(exec_time_start
- exec_time_end
)+" to move file.");
857 if(os
.path
.exists(tmpfilename
)):
858 os
.remove(tmpfilename
);
859 returnval
= {'Type': "File", 'Filename': filepath
, 'Filesize': downloadsize
, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize
, 2, "IEC"), 'SI': get_readable_size(downloadsize
, 2, "SI")}, 'DownloadTime': pretmpfilename
['DownloadTime'], 'DownloadTimeReadable': pretmpfilename
['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start
- exec_time_end
), 'MoveFileTimeReadable': hms_string(exec_time_start
- exec_time_end
), 'Headers': pretmpfilename
['Headers'], 'URL': pretmpfilename
['URL'], 'Code': pretmpfilename
['Code']};
860 if(outfile
=="-" and sys
.version
[0]=="2"):
861 pretmpfilename
= download_from_url_file_with_requests(httpurl
, httpheaders
, httpcookie
, buffersize
[0], sleep
);
862 tmpfilename
= pretmpfilename
['Filename'];
863 downloadsize
= os
.path
.getsize(tmpfilename
);
866 exec_time_start
= time
.time();
867 with
open(tmpfilename
, 'rb') as ft
:
870 databytes
= ft
.read(buffersize
[1]);
871 if not databytes
: break;
872 datasize
= len(databytes
);
873 fulldatasize
= datasize
+ fulldatasize
;
876 percentage
= str("{0:.2f}".format(float(float(fulldatasize
/ downloadsize
) * 100))).rstrip('0').rstrip('.')+"%";
877 downloaddiff
= fulldatasize
- prevdownsize
;
878 log
.info("Copying "+get_readable_size(fulldatasize
, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize
, 2, "SI")['ReadableWithSuffix']+" "+str(percentage
)+" / Copied "+get_readable_size(downloaddiff
, 2, "IEC")['ReadableWithSuffix']);
879 prevdownsize
= fulldatasize
;
882 fdata
= f
.getvalue();
885 os
.remove(tmpfilename
);
886 exec_time_end
= time
.time();
887 log
.info("It took "+hms_string(exec_time_start
- exec_time_end
)+" to copy file.");
888 returnval
= {'Type': "Content", 'Content': fdata
, 'Contentsize': downloadsize
, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize
, 2, "IEC"), 'SI': get_readable_size(downloadsize
, 2, "SI")}, 'DownloadTime': pretmpfilename
['DownloadTime'], 'DownloadTimeReadable': pretmpfilename
['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start
- exec_time_end
), 'MoveFileTimeReadable': hms_string(exec_time_start
- exec_time_end
), 'Headers': pretmpfilename
['Headers'], 'URL': pretmpfilename
['URL'], 'Code': pretmpfilename
['Code']};
889 if(outfile
=="-" and sys
.version
[0]>="3"):
890 pretmpfilename
= download_from_url_file_with_requests(httpurl
, httpheaders
, httpcookie
, buffersize
[0], sleep
);
891 tmpfilename
= pretmpfilename
['Filename'];
892 downloadsize
= os
.path
.getsize(tmpfilename
);
895 exec_time_start
= time
.time();
896 with
open(tmpfilename
, 'rb') as ft
:
899 databytes
= ft
.read(buffersize
[1]);
900 if not databytes
: break;
901 datasize
= len(databytes
);
902 fulldatasize
= datasize
+ fulldatasize
;
905 percentage
= str("{0:.2f}".format(float(float(fulldatasize
/ downloadsize
) * 100))).rstrip('0').rstrip('.')+"%";
906 downloaddiff
= fulldatasize
- prevdownsize
;
907 log
.info("Copying "+get_readable_size(fulldatasize
, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize
, 2, "SI")['ReadableWithSuffix']+" "+str(percentage
)+" / Copied "+get_readable_size(downloaddiff
, 2, "IEC")['ReadableWithSuffix']);
908 prevdownsize
= fulldatasize
;
911 fdata
= f
.getvalue();
914 os
.remove(tmpfilename
);
915 exec_time_end
= time
.time();
916 log
.info("It took "+hms_string(exec_time_start
- exec_time_end
)+" to copy file.");
917 returnval
= {'Type': "Content", 'Content': fdata
, 'Contentsize': downloadsize
, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize
, 2, "IEC"), 'SI': get_readable_size(downloadsize
, 2, "SI")}, 'DownloadTime': pretmpfilename
['DownloadTime'], 'DownloadTimeReadable': pretmpfilename
['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start
- exec_time_end
), 'MoveFileTimeReadable': hms_string(exec_time_start
- exec_time_end
), 'Headers': pretmpfilename
['Headers'], 'URL': pretmpfilename
['URL'], 'Code': pretmpfilename
['Code']};
920 if(not haverequests
):
921 def download_from_url_to_file_with_requests(httpurl
, httpheaders
, httpcookie
, outfile
="-", outpath
=os
.getcwd(), buffersize
=[524288, 524288], sleep
=-1):
922 returnval
= download_from_url_to_file_with_urllib(httpurl
, httpheaders
, httpcookie
, buffersize
, outfile
, outpath
, sleep
)
926 def download_from_url_with_mechanize(httpurl
, httpheaders
, httpcookie
, sleep
=-1):
927 global geturls_download_sleep
;
929 sleep
= geturls_download_sleep
;
930 geturls_opener
= mechanize
.Browser();
931 if(isinstance(httpheaders
, dict)):
932 httpheaders
= make_http_headers_from_dict_to_list(httpheaders
);
934 geturls_opener
.addheaders
= httpheaders
;
935 geturls_opener
.set_cookiejar(httpcookie
);
936 geturls_opener
.set_handle_robots(False);
937 geturls_text
= geturls_opener
.open(httpurl
);
938 log
.info("Downloading URL "+httpurl
);
939 if(geturls_text
.info().get("Content-Encoding")=="gzip" or geturls_text
.info().get("Content-Encoding")=="deflate"):
940 if(sys
.version
[0]=="2"):
941 strbuf
= StringIO(geturls_text
.read());
942 if(sys
.version
[0]>="3"):
943 strbuf
= BytesIO(geturls_text
.read());
944 gzstrbuf
= gzip
.GzipFile(fileobj
=strbuf
);
945 returnval_content
= gzstrbuf
.read()[:];
946 if(geturls_text
.info().get("Content-Encoding")!="gzip" and geturls_text
.info().get("Content-Encoding")!="deflate"):
947 returnval_content
= geturls_text
.read()[:];
948 returnval
= {'Type': "Content", 'Content': returnval_content
, 'Headers': dict(geturls_text
.info()), 'URL': geturls_text
.geturl(), 'Code': geturls_text
.code
};
949 geturls_text
.close();
952 if(not havemechanize
):
953 def download_from_url_with_mechanize(httpurl
, httpheaders
, httpcookie
, sleep
=-1):
954 returnval
= download_from_url_with_urllib(httpurl
, httpheaders
, httpcookie
, sleep
)
958 def download_from_url_file_with_mechanize(httpurl
, httpheaders
, httpcookie
, buffersize
=524288, sleep
=-1):
959 global geturls_download_sleep
, tmpfileprefix
, tmpfilesuffix
;
960 exec_time_start
= time
.time();
961 myhash
= hashlib
.new("sha1");
962 if(sys
.version
[0]=="2"):
963 myhash
.update(httpurl
);
964 myhash
.update(str(buffersize
));
965 myhash
.update(str(exec_time_start
));
966 if(sys
.version
[0]>="3"):
967 myhash
.update(httpurl
.encode('utf-8'));
968 myhash
.update(str(buffersize
).encode('utf-8'));
969 myhash
.update(str(exec_time_start
).encode('utf-8'));
970 newtmpfilesuffix
= tmpfilesuffix
+ str(myhash
.hexdigest());
972 sleep
= geturls_download_sleep
;
973 geturls_opener
= mechanize
.Browser();
974 if(isinstance(httpheaders
, dict)):
975 httpheaders
= make_http_headers_from_dict_to_list(httpheaders
);
977 geturls_opener
.addheaders
= httpheaders
;
978 geturls_opener
.set_cookiejar(httpcookie
);
979 geturls_opener
.set_handle_robots(False);
980 geturls_text
= geturls_opener
.open(httpurl
);
981 downloadsize
= int(geturls_text
.info().get('Content-Length'));
982 if(downloadsize
is not None):
983 downloadsize
= int(downloadsize
);
984 if downloadsize
is None: downloadsize
= 0;
987 log
.info("Downloading URL "+httpurl
);
988 with tempfile
.NamedTemporaryFile('wb+', prefix
=tmpfileprefix
, suffix
=newtmpfilesuffix
, delete
=False) as f
:
989 tmpfilename
= f
.name
;
990 returnval
= {'Type': "File", 'Filename': tmpfilename
, 'Filesize': downloadsize
, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize
, 2, "IEC"), 'SI': get_readable_size(downloadsize
, 2, "SI")}, 'Headers': dict(geturls_text
.info()), 'URL': geturls_text
.geturl(), 'Code': geturls_text
.code
};
992 databytes
= geturls_text
.read(buffersize
);
993 if not databytes
: break;
994 datasize
= len(databytes
);
995 fulldatasize
= datasize
+ fulldatasize
;
998 percentage
= str("{0:.2f}".format(float(float(fulldatasize
/ downloadsize
) * 100))).rstrip('0').rstrip('.')+"%";
999 downloaddiff
= fulldatasize
- prevdownsize
;
1000 log
.info("Downloading "+get_readable_size(fulldatasize
, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize
, 2, "SI")['ReadableWithSuffix']+" "+str(percentage
)+" / Downloaded "+get_readable_size(downloaddiff
, 2, "IEC")['ReadableWithSuffix']);
1001 prevdownsize
= fulldatasize
;
1004 geturls_text
.close();
1005 exec_time_end
= time
.time();
1006 log
.info("It took "+hms_string(exec_time_start
- exec_time_end
)+" to download file.");
1007 returnval
.update({'Filesize': os
.path
.getsize(tmpfilename
), 'DownloadTime': float(exec_time_start
- exec_time_end
), 'DownloadTimeReadable': hms_string(exec_time_start
- exec_time_end
)});
1010 if(not havemechanize
):
1011 def download_from_url_file_with_mechanize(httpurl
, httpheaders
, httpcookie
, buffersize
=524288, sleep
=-1):
1012 returnval
= download_from_url_file_with_urllib(httpurl
, httpheaders
, httpcookie
, buffersize
, sleep
)
1016 def download_from_url_to_file_with_mechanize(httpurl
, httpheaders
, httpcookie
, outfile
="-", outpath
=os
.getcwd(), buffersize
=[524288, 524288], sleep
=-1):
1017 global geturls_download_sleep
;
1019 sleep
= geturls_download_sleep
;
1020 if(not outfile
=="-"):
1021 outpath
= outpath
.rstrip(os
.path
.sep
);
1022 filepath
= os
.path
.realpath(outpath
+os
.path
.sep
+outfile
);
1023 if(not os
.path
.exists(outpath
)):
1024 os
.makedirs(outpath
);
1025 if(os
.path
.exists(outpath
) and os
.path
.isfile(outpath
)):
1027 if(os
.path
.exists(filepath
) and os
.path
.isdir(filepath
)):
1029 pretmpfilename
= download_from_url_file_with_mechanize(httpurl
, httpheaders
, httpcookie
, buffersize
[0], sleep
);
1030 tmpfilename
= pretmpfilename
['Filename'];
1031 downloadsize
= os
.path
.getsize(tmpfilename
);
1033 log
.info("Moving file "+tmpfilename
+" to "+filepath
);
1034 exec_time_start
= time
.time();
1035 shutil
.move(tmpfilename
, filepath
);
1036 exec_time_end
= time
.time();
1037 log
.info("It took "+hms_string(exec_time_start
- exec_time_end
)+" to move file.");
1038 if(os
.path
.exists(tmpfilename
)):
1039 os
.remove(tmpfilename
);
1040 returnval
= {'Type': "File", 'Filename': filepath
, 'Filesize': downloadsize
, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize
, 2, "IEC"), 'SI': get_readable_size(downloadsize
, 2, "SI")}, 'DownloadTime': pretmpfilename
['DownloadTime'], 'DownloadTimeReadable': pretmpfilename
['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start
- exec_time_end
), 'MoveFileTimeReadable': hms_string(exec_time_start
- exec_time_end
), 'Headers': pretmpfilename
['Headers'], 'URL': pretmpfilename
['URL'], 'Code': pretmpfilename
['Code']};
1041 if(outfile
=="-" and sys
.version
[0]=="2"):
1042 pretmpfilename
= download_from_url_file_with_mechanize(httpurl
, httpheaders
, httpcookie
, buffersize
[0], sleep
);
1043 tmpfilename
= pretmpfilename
['Filename'];
1044 downloadsize
= os
.path
.getsize(tmpfilename
);
1047 exec_time_start
= time
.time();
1048 with
open(tmpfilename
, 'rb') as ft
:
1051 databytes
= ft
.read(buffersize
[1]);
1052 if not databytes
: break;
1053 datasize
= len(databytes
);
1054 fulldatasize
= datasize
+ fulldatasize
;
1057 percentage
= str("{0:.2f}".format(float(float(fulldatasize
/ downloadsize
) * 100))).rstrip('0').rstrip('.')+"%";
1058 downloaddiff
= fulldatasize
- prevdownsize
;
1059 log
.info("Copying "+get_readable_size(fulldatasize
, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize
, 2, "SI")['ReadableWithSuffix']+" "+str(percentage
)+" / Copied "+get_readable_size(downloaddiff
, 2, "IEC")['ReadableWithSuffix']);
1060 prevdownsize
= fulldatasize
;
1063 fdata
= f
.getvalue();
1066 os
.remove(tmpfilename
);
1067 exec_time_end
= time
.time();
1068 log
.info("It took "+hms_string(exec_time_start
- exec_time_end
)+" to copy file.");
1069 returnval
= {'Type': "Content", 'Content': fdata
, 'Contentsize': downloadsize
, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize
, 2, "IEC"), 'SI': get_readable_size(downloadsize
, 2, "SI")}, 'DownloadTime': pretmpfilename
['DownloadTime'], 'DownloadTimeReadable': pretmpfilename
['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start
- exec_time_end
), 'MoveFileTimeReadable': hms_string(exec_time_start
- exec_time_end
), 'Headers': pretmpfilename
['Headers'], 'URL': pretmpfilename
['URL'], 'Code': pretmpfilename
['Code']};
1070 if(outfile
=="-" and sys
.version
[0]>="3"):
1071 pretmpfilename
= download_from_url_file_with_mechanize(httpurl
, httpheaders
, httpcookie
, buffersize
[0], sleep
);
1072 tmpfilename
= pretmpfilename
['Filename'];
1073 downloadsize
= os
.path
.getsize(tmpfilename
);
1076 exec_time_start
= time
.time();
1077 with
open(tmpfilename
, 'rb') as ft
:
1080 databytes
= ft
.read(buffersize
[1]);
1081 if not databytes
: break;
1082 datasize
= len(databytes
);
1083 fulldatasize
= datasize
+ fulldatasize
;
1086 percentage
= str("{0:.2f}".format(float(float(fulldatasize
/ downloadsize
) * 100))).rstrip('0').rstrip('.')+"%";
1087 downloaddiff
= fulldatasize
- prevdownsize
;
1088 log
.info("Copying "+get_readable_size(fulldatasize
, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize
, 2, "SI")['ReadableWithSuffix']+" "+str(percentage
)+" / Copied "+get_readable_size(downloaddiff
, 2, "IEC")['ReadableWithSuffix']);
1089 prevdownsize
= fulldatasize
;
1092 fdata
= f
.getvalue();
1095 os
.remove(tmpfilename
);
1096 exec_time_end
= time
.time();
1097 log
.info("It took "+hms_string(exec_time_start
- exec_time_end
)+" to copy file.");
1098 returnval
= {'Type': "Content", 'Content': fdata
, 'Contentsize': downloadsize
, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize
, 2, "IEC"), 'SI': get_readable_size(downloadsize
, 2, "SI")}, 'DownloadTime': pretmpfilename
['DownloadTime'], 'DownloadTimeReadable': pretmpfilename
['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start
- exec_time_end
), 'MoveFileTimeReadable': hms_string(exec_time_start
- exec_time_end
), 'Headers': pretmpfilename
['Headers'], 'URL': pretmpfilename
['URL'], 'Code': pretmpfilename
['Code']};
1101 if(not havemechanize
):
1102 def download_from_url_to_file_with_mechanize(httpurl
, httpheaders
, httpcookie
, outfile
="-", outpath
=os
.getcwd(), buffersize
=[524288, 524288], sleep
=-1):
1103 returnval
= download_from_url_to_file_with_urllib(httpurl
, httpheaders
, httpcookie
, buffersize
, outfile
, outpath
, sleep
)