This program is free software; you can redistribute it and/or modify
it under the terms of the Revised BSD License.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
Revised BSD License for more details.

Copyright 2016-2023 Game Maker 2k - https://github.com/GameMaker2k
Copyright 2016-2023 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

$FileInfo: pywwwget.py - Last Update: 10/5/2023 Ver. 2.0.2 RC 1 - Author: cooldude2k $
from __future__ import division, absolute_import, print_function;
import re, os, sys, hashlib, shutil, platform, tempfile, urllib, zlib, time, argparse, cgi, subprocess, socket, email.utils, datetime, time;
import logging as log;
from ftplib import FTP, FTP_TLS;
from base64 import b64encode;
havemechanize = False;
havemechanize = False;
from httplib2 import HTTPConnectionWithTimeout, HTTPSConnectionWithTimeout;
if(sys.version[0]=="2"):
    from io import StringIO, BytesIO;
    from cStringIO import StringIO;
    from cStringIO import StringIO as BytesIO;
    from StringIO import StringIO;
    from StringIO import StringIO as BytesIO;
    # From http://python-future.org/compatible_idioms.html
    from urlparse import urlparse, urlunparse, urlsplit, urlunsplit, urljoin;
    from urllib import urlencode;
    from urllib import urlopen as urlopenalt;
    from urllib2 import urlopen, Request, install_opener, HTTPError, URLError, build_opener, HTTPCookieProcessor;
    import urlparse, cookielib;
    from httplib import HTTPConnection, HTTPSConnection;
if(sys.version[0]>="3"):
    from io import StringIO, BytesIO;
    # From http://python-future.org/compatible_idioms.html
    from urllib.parse import urlparse, urlunparse, urlsplit, urlunsplit, urljoin, urlencode;
    from urllib.request import urlopen, Request, install_opener, build_opener, HTTPCookieProcessor;
    from urllib.error import HTTPError, URLError;
    import urllib.parse as urlparse;
    import http.cookiejar as cookielib;
    from http.client import HTTPConnection, HTTPSConnection;
__program_name__ = "PyWWW-Get";
__program_alt_name__ = "PyWWWGet";
__program_small_name__ = "wwwget";
__project__ = __program_name__;
__project_url__ = "https://github.com/GameMaker2k/PyWWW-Get";
__version_info__ = (2, 0, 2, "RC 1", 1);
__version_date_info__ = (2023, 10, 5, "RC 1", 1);
__version_date__ = str(__version_date_info__[0])+"."+str(__version_date_info__[1]).zfill(2)+"."+str(__version_date_info__[2]).zfill(2);
__revision__ = __version_info__[3];
__revision_id__ = "$Id$";
if(__version_info__[4] is not None):
    __version_date_plusrc__ = __version_date__+"-"+str(__version_date_info__[4]);
if(__version_info__[4] is None):
    __version_date_plusrc__ = __version_date__;
if(__version_info__[3] is not None):
    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])+" "+str(__version_info__[3]);
if(__version_info__[3] is None):
    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]);
tmpfileprefix = "py"+str(sys.version_info[0])+__program_small_name__+str(__version_info__[0])+"-";
pytempdir = tempfile.gettempdir();
PyBitness = platform.architecture();
if(PyBitness=="32bit" or PyBitness=="32"):
    PyBitness = "32";
elif(PyBitness=="64bit" or PyBitness=="64"):
    PyBitness = "64";
compression_supported = "gzip, deflate";
if(havebrotli and not havezstd):
    compression_supported = "gzip, deflate, br";
elif(not havebrotli and havezstd):
    compression_supported = "gzip, deflate, zstd";
elif(havebrotli and havezstd):
    compression_supported = "gzip, deflate, zstd, br";
else:
    compression_supported = "gzip, deflate";
geturls_cj = cookielib.CookieJar();
# Sec-CH-UA client-hint add-ons per Windows version (platform, architecture, bitness, platform version).
windowsNT4_ua_string = "Windows NT 4.0";
windowsNT4_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "32", 'SEC-CH-UA-PLATFORM-VERSION': "4.0.0"};
windows2k_ua_string = "Windows NT 5.0";
windows2k_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "32", 'SEC-CH-UA-PLATFORM-VERSION': "5.0.0"};
windowsXP_ua_string = "Windows NT 5.1";
windowsXP_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "32", 'SEC-CH-UA-PLATFORM-VERSION': "5.1.0"};
windowsXP64_ua_string = "Windows NT 5.2; Win64; x64";
windowsXP64_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "5.1.0"};
windows7_ua_string = "Windows NT 6.1; Win64; x64";
windows7_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "6.1.0"};
windows8_ua_string = "Windows NT 6.2; Win64; x64";
windows8_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "6.2.0"};
windows81_ua_string = "Windows NT 6.3; Win64; x64";
windows81_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "6.3.0"};
windows10_ua_string = "Windows NT 10.0; Win64; x64";
windows10_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "10.0.0"};
windows11_ua_string = "Windows NT 11.0; Win64; x64";
windows11_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "11.0.0"};
geturls_ua_firefox_windows7 = "Mozilla/5.0 ("+windows7_ua_string+"; rv:109.0) Gecko/20100101 Firefox/117.0";
geturls_ua_seamonkey_windows7 = "Mozilla/5.0 ("+windows7_ua_string+"; rv:91.0) Gecko/20100101 Firefox/91.0 SeaMonkey/2.53.17";
geturls_ua_chrome_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36";
geturls_ua_chromium_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chromium/117.0.0.0 Chrome/117.0.0.0 Safari/537.36";
geturls_ua_palemoon_windows7 = "Mozilla/5.0 ("+windows7_ua_string+"; rv:102.0) Gecko/20100101 Goanna/6.3 Firefox/102.0 PaleMoon/32.4.0.1";
geturls_ua_opera_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36 OPR/102.0.0.0";
geturls_ua_vivaldi_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36 Vivaldi/6.2.3105.48";
geturls_ua_internet_explorer_windows7 = "Mozilla/5.0 ("+windows7_ua_string+"; Trident/7.0; rv:11.0) like Gecko";
geturls_ua_microsoft_edge_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36 Edg/117.0.2045.31";
geturls_ua_pywwwget_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prourl})".format(proname=__project__, prover=__version__, prourl=__project_url__);
if(platform.python_implementation()!=""):
    py_implementation = platform.python_implementation();
if(platform.python_implementation()==""):
    py_implementation = "Python";
geturls_ua_pywwwget_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system()+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__);
geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)";
geturls_ua_googlebot_google_old = "Googlebot/2.1 (+http://www.google.com/bot.html)";
geturls_ua = geturls_ua_firefox_windows7;
geturls_headers_firefox_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_firefox_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
geturls_headers_seamonkey_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_seamonkey_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
geturls_headers_chrome_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_chrome_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Google Chrome\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Chromium\";v=\"117\"", 'SEC-CH-UA-FULL-VERSION': "117.0.5938.63"};
geturls_headers_chrome_windows7.update(windows7_ua_addon);
geturls_headers_chromium_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_chromium_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Chromium\";v=\"117\", \"Not;A=Brand\";v=\"24\"", 'SEC-CH-UA-FULL-VERSION': "117.0.5938.63"};
geturls_headers_chromium_windows7.update(windows7_ua_addon);
geturls_headers_palemoon_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_palemoon_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
geturls_headers_opera_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_opera_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Chromium\";v=\"116\", \"Not;A=Brand\";v=\"8\", \"Opera\";v=\"102\"", 'SEC-CH-UA-FULL-VERSION': "102.0.4880.56"};
geturls_headers_opera_windows7.update(windows7_ua_addon);
geturls_headers_vivaldi_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_vivaldi_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Google Chrome\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Vivaldi\";v=\"6.2\"", 'SEC-CH-UA-FULL-VERSION': "6.2.3105.48"};
geturls_headers_vivaldi_windows7.update(windows7_ua_addon);
geturls_headers_internet_explorer_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_internet_explorer_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
geturls_headers_microsoft_edge_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_microsoft_edge_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Microsoft Edge\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Chromium\";v=\"117\"", 'SEC-CH-UA-FULL-VERSION': "117.0.2045.31"};
geturls_headers_microsoft_edge_windows7.update(windows7_ua_addon);
geturls_headers_pywwwget_python = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_pywwwget_python, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\""+__project__+"\";v=\""+str(__version__)+"\", \"Not;A=Brand\";v=\"8\", \""+py_implementation+"\";v=\""+str(platform.release())+"\"", 'SEC-CH-UA-FULL-VERSION': str(__version__), 'SEC-CH-UA-PLATFORM': ""+py_implementation+"", 'SEC-CH-UA-ARCH': ""+platform.machine()+"", 'SEC-CH-UA-PLATFORM-VERSION': str(__version__), 'SEC-CH-UA-BITNESS': str(PyBitness)};
geturls_headers_pywwwget_python_alt = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_pywwwget_python_alt, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\""+__project__+"\";v=\""+str(__version__)+"\", \"Not;A=Brand\";v=\"8\", \""+py_implementation+"\";v=\""+str(platform.release())+"\"", 'SEC-CH-UA-FULL-VERSION': str(__version__), 'SEC-CH-UA-PLATFORM': ""+py_implementation+"", 'SEC-CH-UA-ARCH': ""+platform.machine()+"", 'SEC-CH-UA-PLATFORM-VERSION': str(__version__), 'SEC-CH-UA-BITNESS': str(PyBitness)};
geturls_headers_googlebot_google = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_googlebot_google, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
geturls_headers_googlebot_google_old = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_googlebot_google_old, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
geturls_headers = geturls_headers_firefox_windows7;
geturls_download_sleep = 0;
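# Note: geturls_headers (the Firefox-on-Windows-7 profile) and the geturls_cj cookie jar above
# are the defaults consumed by download_from_url() further below.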
def verbose_printout(dbgtxt, outtype="log", dbgenable=True, dgblevel=20):
    if(outtype=="print" and dbgenable):
        print(dbgtxt);
    elif(outtype=="log" and dbgenable):
        logging.info(dbgtxt);
    elif(outtype=="warning" and dbgenable):
        logging.warning(dbgtxt);
    elif(outtype=="error" and dbgenable):
        logging.error(dbgtxt);
    elif(outtype=="critical" and dbgenable):
        logging.critical(dbgtxt);
    elif(outtype=="exception" and dbgenable):
        logging.exception(dbgtxt);
    elif(outtype=="logalt" and dbgenable):
        logging.log(dgblevel, dbgtxt);
    elif(outtype=="debug" and dbgenable):
        logging.debug(dbgtxt);
def verbose_printout_return(dbgtxt, outtype="log", dbgenable=True, dgblevel=20):
    dbgout = verbose_printout(dbgtxt, outtype, dbgenable, dgblevel);
def add_url_param(url, **params):
    n = 3;  # index of the query component returned by urlsplit()
    parts = list(urlparse.urlsplit(url));
    d = dict(cgi.parse_qsl(parts[n]));  # use cgi.parse_qs for list values
    d.update(params);
    parts[n] = urlencode(d);
    return urlparse.urlunsplit(parts);
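# Illustrative example (assuming the query component sits at index 3):
#   add_url_param("http://example.com/page?a=1", b="2")  ->  "http://example.com/page?a=1&b=2"
# (parameter ordering may differ depending on the Python version).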
os.environ["PATH"] = os.environ["PATH"] + os.pathsep + os.path.dirname(os.path.realpath(__file__)) + os.pathsep + os.getcwd();
def which_exec(execfile):
    for path in os.environ["PATH"].split(":"):
        if os.path.exists(path + "/" + execfile):
            return path + "/" + execfile;
def listize(varlist):
    newlistreg.update({ilx: varlist[il]});
    newlistrev.update({varlist[il]: ilx});
    newlistfull = {1: newlistreg, 2: newlistrev, 'reg': newlistreg, 'rev': newlistrev};
def twolistize(varlist):
    newlistnamereg.update({ilx: varlist[il][0].strip()});
    newlistnamerev.update({varlist[il][0].strip(): ilx});
    newlistdescreg.update({ilx: varlist[il][1].strip()});
    newlistdescrev.update({varlist[il][1].strip(): ilx});
    newlistnametmp = {1: newlistnamereg, 2: newlistnamerev, 'reg': newlistnamereg, 'rev': newlistnamerev};
    newlistdesctmp = {1: newlistdescreg, 2: newlistdescrev, 'reg': newlistdescreg, 'rev': newlistdescrev};
    newlistfull = {1: newlistnametmp, 2: newlistdesctmp, 'name': newlistnametmp, 'desc': newlistdesctmp};
def arglistize(proexec, *varlist):
    newarglist = [proexec];
    if varlist[il][0] is not None:
        newarglist.append(varlist[il][0]);
    if varlist[il][1] is not None:
        newarglist.append(varlist[il][1]);
def fix_header_names(header_dict):
    if(sys.version[0]=="2"):
        header_dict = {k.title(): v for k, v in header_dict.iteritems()};
    if(sys.version[0]>="3"):
        header_dict = {k.title(): v for k, v in header_dict.items()};
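# Example: fix_header_names({'content-type': "text/html"}) gives {'Content-Type': "text/html"}
# (header names are normalized with str.title()).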
# hms_string by ArcGIS Python Recipes
# https://arcpy.wordpress.com/2012/04/20/146/
def hms_string(sec_elapsed):
    h = int(sec_elapsed / (60 * 60));
    m = int((sec_elapsed % (60 * 60)) / 60);
    s = sec_elapsed % 60.0;
    return "{}:{:>02}:{:>05.2f}".format(h, m, s);
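# Example: hms_string(3661.5) returns "1:01:01.50".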
# get_readable_size by Lipis
# http://stackoverflow.com/posts/14998888/revisions
def get_readable_size(bytes, precision=1, unit="IEC"):
    if(unit!="IEC" and unit!="SI"):
        unit = "IEC";
    if(unit=="IEC"):
        units = [" B", " KiB", " MiB", " GiB", " TiB", " PiB", " EiB", " ZiB"];
        unitswos = ["B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB"];
    if(unit=="SI"):
        units = [" B", " kB", " MB", " GB", " TB", " PB", " EB", " ZB"];
        unitswos = ["B", "kB", "MB", "GB", "TB", "PB", "EB", "ZB"];
    if abs(bytes) < unitsize:
        strformat = "%3."+str(precision)+"f%s";
        pre_return_val = (strformat % (bytes, unit));
        pre_return_val = re.sub(r"([0]+) ([A-Za-z]+)", r" \2", pre_return_val);
        pre_return_val = re.sub(r"\. ([A-Za-z]+)", r" \1", pre_return_val);
        alt_return_val = pre_return_val.split();
        return_val = {'Bytes': orgbytes, 'ReadableWithSuffix': pre_return_val, 'ReadableWithoutSuffix': alt_return_val[0], 'ReadableSuffix': alt_return_val[1]};
    strformat = "%."+str(precision)+"f%s";
    pre_return_val = (strformat % (bytes, "YiB"));
    pre_return_val = re.sub(r"([0]+) ([A-Za-z]+)", r" \2", pre_return_val);
    pre_return_val = re.sub(r"\. ([A-Za-z]+)", r" \1", pre_return_val);
    alt_return_val = pre_return_val.split();
    return_val = {'Bytes': orgbytes, 'ReadableWithSuffix': pre_return_val, 'ReadableWithoutSuffix': alt_return_val[0], 'ReadableSuffix': alt_return_val[1]};
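# Illustrative example (assuming the elided unit-scaling loop): get_readable_size(2048, 2, "IEC")
# yields roughly {'ReadableWithSuffix': "2 KiB", 'ReadableWithoutSuffix': "2", 'ReadableSuffix': "KiB", ...}.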
def get_readable_size_from_file(infile, precision=1, unit="IEC", usehashes=False, usehashtypes="md5,sha1"):
    usehashtypes = usehashtypes.lower();
    getfilesize = os.path.getsize(infile);
    return_val = get_readable_size(getfilesize, precision, unit);
    hashtypelist = usehashtypes.split(",");
    openfile = open(infile, "rb");
    filecontents = openfile.read();
    listnumend = len(hashtypelist);
    while(listnumcount < listnumend):
        hashtypelistlow = hashtypelist[listnumcount].strip();
        hashtypelistup = hashtypelistlow.upper();
        filehash = hashlib.new(hashtypelistup);
        filehash.update(filecontents);
        filegethash = filehash.hexdigest();
        return_val.update({hashtypelistup: filegethash});
def get_readable_size_from_string(instring, precision=1, unit="IEC", usehashes=False, usehashtypes="md5,sha1"):
    usehashtypes = usehashtypes.lower();
    getfilesize = len(instring);
    return_val = get_readable_size(getfilesize, precision, unit);
    hashtypelist = usehashtypes.split(",");
    listnumend = len(hashtypelist);
    while(listnumcount < listnumend):
        hashtypelistlow = hashtypelist[listnumcount].strip();
        hashtypelistup = hashtypelistlow.upper();
        filehash = hashlib.new(hashtypelistup);
        if(sys.version[0]=="2"):
            filehash.update(instring);
        if(sys.version[0]>="3"):
            filehash.update(instring.encode('utf-8'));
        filegethash = filehash.hexdigest();
        return_val.update({hashtypelistup: filegethash});
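# Illustrative example: get_readable_size_from_string("hello", usehashes=True, usehashtypes="md5")
# adds an 'MD5' entry ("5d41402abc4b2a76b9719d911017c592") alongside the readable-size fields.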
def http_status_to_reason(code):
    reasons = {
        101: 'Switching Protocols',
        203: 'Non-Authoritative Information',
        205: 'Reset Content',
        206: 'Partial Content',
        208: 'Already Reported',
        300: 'Multiple Choices',
        301: 'Moved Permanently',
        307: 'Temporary Redirect',
        308: 'Permanent Redirect',
        402: 'Payment Required',
        405: 'Method Not Allowed',
        406: 'Not Acceptable',
        407: 'Proxy Authentication Required',
        408: 'Request Timeout',
        411: 'Length Required',
        412: 'Precondition Failed',
        413: 'Payload Too Large',
        415: 'Unsupported Media Type',
        416: 'Range Not Satisfiable',
        417: 'Expectation Failed',
        421: 'Misdirected Request',
        422: 'Unprocessable Entity',
        424: 'Failed Dependency',
        426: 'Upgrade Required',
        428: 'Precondition Required',
        429: 'Too Many Requests',
        431: 'Request Header Fields Too Large',
        451: 'Unavailable For Legal Reasons',
        500: 'Internal Server Error',
        501: 'Not Implemented',
        503: 'Service Unavailable',
        504: 'Gateway Timeout',
        505: 'HTTP Version Not Supported',
        506: 'Variant Also Negotiates',
        507: 'Insufficient Storage',
        508: 'Loop Detected',
        511: 'Network Authentication Required'
    }
    return reasons.get(code, 'Unknown Status Code');
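# Example: http_status_to_reason(429) returns 'Too Many Requests'; unknown codes map to 'Unknown Status Code'.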
def ftp_status_to_reason(code):
    reasons = {
        110: 'Restart marker reply',
        120: 'Service ready in nnn minutes',
        125: 'Data connection already open; transfer starting',
        150: 'File status okay; about to open data connection',
        202: 'Command not implemented, superfluous at this site',
        211: 'System status, or system help reply',
        212: 'Directory status',
        215: 'NAME system type',
        220: 'Service ready for new user',
        221: 'Service closing control connection',
        225: 'Data connection open; no transfer in progress',
        226: 'Closing data connection',
        227: 'Entering Passive Mode',
        230: 'User logged in, proceed',
        250: 'Requested file action okay, completed',
        257: '"PATHNAME" created',
        331: 'User name okay, need password',
        332: 'Need account for login',
        350: 'Requested file action pending further information',
        421: 'Service not available, closing control connection',
        425: 'Can\'t open data connection',
        426: 'Connection closed; transfer aborted',
        450: 'Requested file action not taken',
        451: 'Requested action aborted. Local error in processing',
        452: 'Requested action not taken. Insufficient storage space in system',
        500: 'Syntax error, command unrecognized',
        501: 'Syntax error in parameters or arguments',
        502: 'Command not implemented',
        503: 'Bad sequence of commands',
        504: 'Command not implemented for that parameter',
        530: 'Not logged in',
        532: 'Need account for storing files',
        550: 'Requested action not taken. File unavailable',
        551: 'Requested action aborted. Page type unknown',
        552: 'Requested file action aborted. Exceeded storage allocation',
        553: 'Requested action not taken. File name not allowed'
    }
    return reasons.get(code, 'Unknown Status Code');
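# Example: ftp_status_to_reason(226) returns 'Closing data connection'.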
def sftp_status_to_reason(code):
    reasons = {
        2: 'SSH_FX_NO_SUCH_FILE',
        3: 'SSH_FX_PERMISSION_DENIED',
        5: 'SSH_FX_BAD_MESSAGE',
        6: 'SSH_FX_NO_CONNECTION',
        7: 'SSH_FX_CONNECTION_LOST',
        8: 'SSH_FX_OP_UNSUPPORTED'
    }
    return reasons.get(code, 'Unknown Status Code');
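# Example: sftp_status_to_reason(2) returns 'SSH_FX_NO_SUCH_FILE'.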
def make_http_headers_from_dict_to_list(headers={'Referer': "http://google.com/", 'User-Agent': geturls_ua, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"}):
    if isinstance(headers, dict):
        returnval = [];
        if(sys.version[0]=="2"):
            for headkey, headvalue in headers.iteritems():
                returnval.append((headkey, headvalue));
        if(sys.version[0]>="3"):
            for headkey, headvalue in headers.items():
                returnval.append((headkey, headvalue));
    elif isinstance(headers, list):
        returnval = headers;
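# Illustrative example: make_http_headers_from_dict_to_list({'User-Agent': "test/1.0"})
# builds [('User-Agent', "test/1.0")], the shape expected by an urllib opener's addheaders.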
def make_http_headers_from_dict_to_pycurl(headers={'Referer': "http://google.com/", 'User-Agent': geturls_ua, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"}):
    if isinstance(headers, dict):
        returnval = [];
        if(sys.version[0]=="2"):
            for headkey, headvalue in headers.iteritems():
                returnval.append(headkey+": "+headvalue);
        if(sys.version[0]>="3"):
            for headkey, headvalue in headers.items():
                returnval.append(headkey+": "+headvalue);
    elif isinstance(headers, list):
        returnval = headers;
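# Illustrative example: make_http_headers_from_dict_to_pycurl({'User-Agent': "test/1.0"})
# builds ["User-Agent: test/1.0"], the format expected by pycurl's HTTPHEADER option.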
def make_http_headers_from_pycurl_to_dict(headers):
    header_dict = {};
    headers = headers.strip().split('\r\n');
    for header in headers:
        parts = header.split(': ', 1);
        if(len(parts)==2):
            key, value = parts;
            header_dict[key.title()] = value;
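# Illustrative example: make_http_headers_from_pycurl_to_dict("HTTP/1.1 200 OK\r\nContent-Type: text/html")
# yields {'Content-Type': "text/html"}; the status line carries no ": " pair and is skipped.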
def make_http_headers_from_list_to_dict(headers=[("Referer", "http://google.com/"), ("User-Agent", geturls_ua), ("Accept-Encoding", compression_supported), ("Accept-Language", "en-US,en;q=0.8,en-CA,en-GB;q=0.6"), ("Accept-Charset", "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7"), ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"), ("Connection", "close")]):
    if isinstance(headers, list):
        returnval.update({headers[mli][0]: headers[mli][1]});
    elif isinstance(headers, dict):
        returnval = headers;
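# Illustrative example: make_http_headers_from_list_to_dict([("User-Agent", "test/1.0")])
# folds the (name, value) pairs back into {'User-Agent': "test/1.0"}.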
def get_httplib_support(checkvalue=None):
    global haverequests, havemechanize, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp;
    returnval = [];
    returnval.append("ftp");
    returnval.append("httplib");
    returnval.append("httplib2");
    returnval.append("urllib");
    returnval.append("urllib3");
    returnval.append("request3");
    returnval.append("request");
    returnval.append("requests");
    returnval.append("httpx");
    returnval.append("httpx2");
    returnval.append("mechanize");
    returnval.append("pycurl");
    if(hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
        returnval.append("pycurl2");
    if(hasattr(pycurl, "CURL_HTTP_VERSION_3_0")):
        returnval.append("pycurl3");
    returnval.append("sftp");
    returnval.append("pysftp");
    if(not checkvalue is None):
        if(checkvalue=="urllib1" or checkvalue=="urllib2"):
            checkvalue = "urllib";
        if(checkvalue=="httplib1"):
            checkvalue = "httplib";
        if(checkvalue in returnval):
            return True;

def check_httplib_support(checkvalue="urllib"):
    if(checkvalue=="urllib1" or checkvalue=="urllib2"):
        checkvalue = "urllib";
    if(checkvalue=="httplib1"):
        checkvalue = "httplib";
    returnval = get_httplib_support(checkvalue);

def get_httplib_support_list():
    returnval = get_httplib_support(None);
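# Illustrative usage: check_httplib_support("urllib2") normalizes the name to "urllib" and tests it
# against the backends collected by get_httplib_support(); get_httplib_support_list() gathers every
# backend name that is importable in this environment.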
def download_from_url(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", buffersize=524288, sleep=-1, timeout=10):
    global geturls_download_sleep, havezstd, havebrotli, haverequests, havemechanize, havepycurl, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp;
    if(sleep<0):
        sleep = geturls_download_sleep;
    # Normalize the requested HTTP backend and fall back when the optional library is unavailable.
    if(httplibuse=="urllib1" or httplibuse=="urllib2" or httplibuse=="request"):
        httplibuse = "urllib";
    if(httplibuse=="httplib1"):
        httplibuse = "httplib";
    if(not haverequests and httplibuse=="requests"):
        httplibuse = "urllib";
    if(not havehttpx and httplibuse=="httpx"):
        httplibuse = "urllib";
    if(not havehttpx and httplibuse=="httpx2"):
        httplibuse = "urllib";
    if(not havehttpcore and httplibuse=="httpcore"):
        httplibuse = "urllib";
    if(not havehttpcore and httplibuse=="httpcore2"):
        httplibuse = "urllib";
    if(not havemechanize and httplibuse=="mechanize"):
        httplibuse = "urllib";
    if(not havepycurl and httplibuse=="pycurl"):
        httplibuse = "urllib";
    if(not havepycurl and httplibuse=="pycurl2"):
        httplibuse = "urllib";
    if(havepycurl and httplibuse=="pycurl2" and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
        httplibuse = "pycurl";
    if(not havepycurl and httplibuse=="pycurl3"):
        httplibuse = "urllib";
    if(havepycurl and httplibuse=="pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
        httplibuse = "pycurl2";
    if(havepycurl and httplibuse=="pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
        httplibuse = "pycurl";
    if(not havehttplib2 and httplibuse=="httplib2"):
        httplibuse = "httplib";
    if(not haveparamiko and httplibuse=="sftp"):
        return False;
    if(not havepysftp and httplibuse=="pysftp"):
        return False;
    urlparts = urlparse.urlparse(httpurl);
    if(isinstance(httpheaders, list)):
        httpheaders = make_http_headers_from_list_to_dict(httpheaders);
    httpheaders = fix_header_names(httpheaders);
    if(httpuseragent is not None):
        if('User-Agent' in httpheaders):
            httpheaders['User-Agent'] = httpuseragent;
        else:
            httpheaders.update({'User-Agent': httpuseragent});
    if(httpreferer is not None):
        if('Referer' in httpheaders):
            httpheaders['Referer'] = httpreferer;
        else:
            httpheaders.update({'Referer': httpreferer});
    if(urlparts.username is not None or urlparts.password is not None):
        if(sys.version[0]=="2"):
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password));
        if(sys.version[0]>="3"):
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
        httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
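    # Credentials embedded in the URL (e.g. "http://user:pass@host/") are folded into a standard
    # HTTP Basic Authorization header built from base64("user:pass").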
    geturls_opener = build_opener(HTTPCookieProcessor(httpcookie));
    if(httplibuse=="urllib" or httplibuse=="mechanize"):
        if(isinstance(httpheaders, dict)):
            httpheaders = make_http_headers_from_dict_to_list(httpheaders);
    if(httplibuse=="pycurl" or httplibuse=="pycurl2" or httplibuse=="pycurl3"):
        if(isinstance(httpheaders, dict)):
            httpheaders = make_http_headers_from_dict_to_pycurl(httpheaders);
    geturls_opener.addheaders = httpheaders;
    if(postdata is not None and not isinstance(postdata, dict)):
        postdata = urlencode(postdata);
    if(httplibuse=="urllib" or httplibuse=="request"):
        geturls_request = Request(httpurl);
        try:
            if(httpmethod=="GET"):
                geturls_text = geturls_opener.open(geturls_request);
            elif(httpmethod=="POST"):
                geturls_text = geturls_opener.open(geturls_request, data=postdata);
            else:
                geturls_text = geturls_opener.open(geturls_request);
        except HTTPError as geturls_text_error:
            geturls_text = geturls_text_error;
            log.info("Error With URL "+httpurl);
            log.info("Error With URL "+httpurl);
        except socket.timeout:
            log.info("Error With URL "+httpurl);
        httpcodeout = geturls_text.getcode();
        try:
            httpcodereason = geturls_text.reason;
        except AttributeError:
            httpcodereason = http_status_to_reason(geturls_text.getcode());
        try:
            httpversionout = geturls_text.version;
        except AttributeError:
            httpversionout = "1.1";
        httpmethodout = geturls_request.get_method();
        httpurlout = geturls_text.geturl();
        httpheaderout = geturls_text.info();
        httpheadersentout = httpheaders;
    elif(httplibuse=="httplib"):
        if(urlparts[0]=="http"):
            httpconn = HTTPConnection(urlparts[1], timeout=timeout);
        elif(urlparts[0]=="https"):
            httpconn = HTTPSConnection(urlparts[1], timeout=timeout);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            if(httpmethod=="GET"):
                httpconn.request("GET", urlparts[2], headers=httpheaders);
            elif(httpmethod=="POST"):
                httpconn.request("POST", urlparts[2], body=postdata, headers=httpheaders);
            else:
                httpconn.request("GET", urlparts[2], headers=httpheaders);
        except socket.timeout:
            log.info("Error With URL "+httpurl);
        except socket.gaierror:
            log.info("Error With URL "+httpurl);
        except BlockingIOError:
            log.info("Error With URL "+httpurl);
        geturls_text = httpconn.getresponse();
        httpcodeout = geturls_text.status;
        httpcodereason = geturls_text.reason;
        if(geturls_text.version=="10"):
            httpversionout = "1.0";
        else:
            httpversionout = "1.1";
        httpmethodout = geturls_text._method;
        httpurlout = httpurl;
        httpheaderout = geturls_text.getheaders();
        httpheadersentout = httpheaders;
    elif(httplibuse=="httplib2"):
        if(urlparts[0]=="http"):
            httpconn = HTTPConnectionWithTimeout(urlparts[1], timeout=timeout);
        elif(urlparts[0]=="https"):
            httpconn = HTTPSConnectionWithTimeout(urlparts[1], timeout=timeout);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            if(httpmethod=="GET"):
                httpconn.request("GET", urlparts[2], headers=httpheaders);
            elif(httpmethod=="POST"):
                httpconn.request("POST", urlparts[2], body=postdata, headers=httpheaders);
            else:
                httpconn.request("GET", urlparts[2], headers=httpheaders);
        except socket.timeout:
            log.info("Error With URL "+httpurl);
        except socket.gaierror:
            log.info("Error With URL "+httpurl);
        except BlockingIOError:
            log.info("Error With URL "+httpurl);
        geturls_text = httpconn.getresponse();
        httpcodeout = geturls_text.status;
        httpcodereason = geturls_text.reason;
        if(geturls_text.version=="10"):
            httpversionout = "1.0";
        else:
            httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = httpurl;
        httpheaderout = geturls_text.getheaders();
        httpheadersentout = httpheaders;
    elif(httplibuse=="urllib3" or httplibuse=="request3"):
        timeout = urllib3.util.Timeout(connect=timeout, read=timeout);
        urllib_pool = urllib3.PoolManager(headers=httpheaders, timeout=timeout);
        try:
            if(httpmethod=="GET"):
                geturls_text = urllib_pool.request("GET", httpurl, headers=httpheaders, preload_content=False);
            elif(httpmethod=="POST"):
                geturls_text = urllib_pool.request("POST", httpurl, body=postdata, headers=httpheaders, preload_content=False);
            else:
                geturls_text = urllib_pool.request("GET", httpurl, headers=httpheaders, preload_content=False);
        except urllib3.exceptions.ConnectTimeoutError:
            log.info("Error With URL "+httpurl);
        except urllib3.exceptions.ConnectError:
            log.info("Error With URL "+httpurl);
        except urllib3.exceptions.MaxRetryError:
            log.info("Error With URL "+httpurl);
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            log.info("Error With URL "+httpurl);
        httpcodeout = geturls_text.status;
        httpcodereason = geturls_text.reason;
        if(geturls_text.version=="10"):
            httpversionout = "1.0";
        else:
            httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = geturls_text.geturl();
        httpheaderout = geturls_text.info();
        httpheadersentout = httpheaders;
    elif(httplibuse=="requests"):
        try:
            reqsession = requests.Session();
            if(httpmethod=="GET"):
                geturls_text = reqsession.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
            elif(httpmethod=="POST"):
                geturls_text = reqsession.post(httpurl, timeout=timeout, data=postdata, headers=httpheaders, cookies=httpcookie);
            else:
                geturls_text = reqsession.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
        except requests.exceptions.ConnectTimeout:
            log.info("Error With URL "+httpurl);
        except requests.exceptions.ConnectError:
            log.info("Error With URL "+httpurl);
        except socket.timeout:
            log.info("Error With URL "+httpurl);
        httpcodeout = geturls_text.status_code;
        httpcodereason = geturls_text.reason;
        if(geturls_text.raw.version=="10"):
            httpversionout = "1.0";
        else:
            httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = geturls_text.url;
        httpheaderout = geturls_text.headers;
        httpheadersentout = geturls_text.request.headers;
    elif(httplibuse=="httpx"):
        try:
            if(httpmethod=="GET"):
                httpx_pool = httpx.Client(http1=True, http2=False, trust_env=True);
                geturls_text = httpx_pool.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
            elif(httpmethod=="POST"):
                httpx_pool = httpx.Client(http1=True, http2=False, trust_env=True);
                geturls_text = httpx_pool.post(httpurl, timeout=timeout, data=postdata, headers=httpheaders, cookies=httpcookie);
            else:
                httpx_pool = httpx.Client(http1=True, http2=False, trust_env=True);
                geturls_text = httpx_pool.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
        except httpx.ConnectTimeout:
            log.info("Error With URL "+httpurl);
        except httpx.ConnectError:
            log.info("Error With URL "+httpurl);
        except socket.timeout:
            log.info("Error With URL "+httpurl);
        httpcodeout = geturls_text.status_code;
        try:
            httpcodereason = geturls_text.reason_phrase;
        except AttributeError:
            httpcodereason = http_status_to_reason(geturls_text.status_code);
        httpversionout = geturls_text.http_version;
        httpmethodout = httpmethod;
        httpurlout = str(geturls_text.url);
        httpheaderout = geturls_text.headers;
        httpheadersentout = geturls_text.request.headers;
    elif(httplibuse=="httpx2"):
        try:
            if(httpmethod=="GET"):
                httpx_pool = httpx.Client(http1=True, http2=True, trust_env=True);
                geturls_text = httpx_pool.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
            elif(httpmethod=="POST"):
                httpx_pool = httpx.Client(http1=True, http2=True, trust_env=True);
                geturls_text = httpx_pool.post(httpurl, timeout=timeout, data=postdata, headers=httpheaders, cookies=httpcookie);
            else:
                httpx_pool = httpx.Client(http1=True, http2=True, trust_env=True);
                geturls_text = httpx_pool.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
        except httpx.ConnectTimeout:
            log.info("Error With URL "+httpurl);
        except httpx.ConnectError:
            log.info("Error With URL "+httpurl);
        except socket.timeout:
            log.info("Error With URL "+httpurl);
        httpcodeout = geturls_text.status_code;
        try:
            httpcodereason = geturls_text.reason_phrase;
        except AttributeError:
            httpcodereason = http_status_to_reason(geturls_text.status_code);
        httpversionout = geturls_text.http_version;
        httpmethodout = httpmethod;
        httpurlout = str(geturls_text.url);
        httpheaderout = geturls_text.headers;
        httpheadersentout = geturls_text.request.headers;
    elif(httplibuse=="httpcore"):
        try:
            if(httpmethod=="GET"):
                httpx_pool = httpcore.ConnectionPool(http1=True, http2=False);
                geturls_text = httpx_pool.request("GET", httpurl, headers=httpheaders);
            elif(httpmethod=="POST"):
                httpx_pool = httpcore.ConnectionPool(http1=True, http2=False);
                geturls_text = httpx_pool.request("POST", httpurl, data=postdata, headers=httpheaders);
            else:
                httpx_pool = httpcore.ConnectionPool(http1=True, http2=False);
                geturls_text = httpx_pool.request("GET", httpurl, headers=httpheaders);
        except httpcore.ConnectTimeout:
            log.info("Error With URL "+httpurl);
        except httpcore.ConnectError:
            log.info("Error With URL "+httpurl);
        except socket.timeout:
            log.info("Error With URL "+httpurl);
        httpcodeout = geturls_text.status;
        httpcodereason = http_status_to_reason(geturls_text.status);
        httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = str(httpurl);
        httpheaderout = geturls_text.headers;
        httpheadersentout = httpheaders;
    elif(httplibuse=="httpcore2"):
        try:
            if(httpmethod=="GET"):
                httpx_pool = httpcore.ConnectionPool(http1=True, http2=True);
                geturls_text = httpx_pool.request("GET", httpurl, headers=httpheaders);
            elif(httpmethod=="POST"):
                httpx_pool = httpcore.ConnectionPool(http1=True, http2=True);
                geturls_text = httpx_pool.request("POST", httpurl, data=postdata, headers=httpheaders);
            else:
                httpx_pool = httpcore.ConnectionPool(http1=True, http2=True);
                geturls_text = httpx_pool.request("GET", httpurl, headers=httpheaders);
        except httpcore.ConnectTimeout:
            log.info("Error With URL "+httpurl);
        except httpcore.ConnectError:
            log.info("Error With URL "+httpurl);
        except socket.timeout:
            log.info("Error With URL "+httpurl);
        httpcodeout = geturls_text.status;
        httpcodereason = http_status_to_reason(geturls_text.status);
        httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = str(httpurl);
        httpheaderout = geturls_text.headers;
        httpheadersentout = httpheaders;
    elif(httplibuse=="mechanize"):
        geturls_opener = mechanize.Browser();
        if(isinstance(httpheaders, dict)):
            httpheaders = make_http_headers_from_dict_to_list(httpheaders);
        geturls_opener.addheaders = httpheaders;
        geturls_opener.set_cookiejar(httpcookie);
        geturls_opener.set_handle_robots(False);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            if(httpmethod=="GET"):
                geturls_text = geturls_opener.open(httpurl);
            elif(httpmethod=="POST"):
                geturls_text = geturls_opener.open(httpurl, data=postdata);
            else:
                geturls_text = geturls_opener.open(httpurl);
        except mechanize.HTTPError as geturls_text_error:
            geturls_text = geturls_text_error;
            log.info("Error With URL "+httpurl);
            log.info("Error With URL "+httpurl);
        except socket.timeout:
            log.info("Error With URL "+httpurl);
        httpcodeout = geturls_text.code;
        httpcodereason = geturls_text.msg;
        httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = geturls_text.geturl();
        httpheaderout = geturls_text.info();
        reqhead = geturls_opener.request;
        httpheadersentout = reqhead.header_items();
    elif(httplibuse=="pycurl"):
        retrieved_body = BytesIO();
        retrieved_headers = BytesIO();
        try:
            if(httpmethod=="GET"):
                geturls_text = pycurl.Curl();
                geturls_text.setopt(geturls_text.URL, httpurl);
                geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_1_1);
                geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
                geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
                geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
                geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
                geturls_text.setopt(geturls_text.TIMEOUT, timeout);
                geturls_text.perform();
            elif(httpmethod=="POST"):
                geturls_text = pycurl.Curl();
                geturls_text.setopt(geturls_text.URL, httpurl);
                geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_1_1);
                geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
                geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
                geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
                geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
                geturls_text.setopt(geturls_text.TIMEOUT, timeout);
                geturls_text.setopt(geturls_text.POST, True);
                geturls_text.setopt(geturls_text.POSTFIELDS, postdata);
                geturls_text.perform();
            else:
                geturls_text = pycurl.Curl();
                geturls_text.setopt(geturls_text.URL, httpurl);
                geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_1_1);
                geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
                geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
                geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
                geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
                geturls_text.setopt(geturls_text.TIMEOUT, timeout);
                geturls_text.perform();
            retrieved_headers.seek(0);
            if(sys.version[0]=="2"):
                pycurlhead = retrieved_headers.read();
            if(sys.version[0]>="3"):
                pycurlhead = retrieved_headers.read().decode('UTF-8');
            pyhttpverinfo = re.findall(r'^HTTP/([0-9.]+) (\d+)(?: ([A-Za-z\s]+))?$', pycurlhead.splitlines()[0].strip().rstrip('\r\n'))[0];
            pycurlheadersout = make_http_headers_from_pycurl_to_dict(pycurlhead);
            retrieved_body.seek(0);
        except socket.timeout:
            log.info("Error With URL "+httpurl);
        except socket.gaierror:
            log.info("Error With URL "+httpurl);
            log.info("Error With URL "+httpurl);
        httpcodeout = geturls_text.getinfo(geturls_text.HTTP_CODE);
        httpcodereason = http_status_to_reason(geturls_text.getinfo(geturls_text.HTTP_CODE));
        httpversionout = pyhttpverinfo[0];
        httpmethodout = httpmethod;
        httpurlout = geturls_text.getinfo(geturls_text.EFFECTIVE_URL);
        httpheaderout = pycurlheadersout;
        httpheadersentout = httpheaders;
    elif(httplibuse=="pycurl2"):
        retrieved_body = BytesIO();
        retrieved_headers = BytesIO();
        try:
            if(httpmethod=="GET"):
                geturls_text = pycurl.Curl();
                geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_2_0);
                geturls_text.setopt(geturls_text.URL, httpurl);
                geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
                geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
                geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
                geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
                geturls_text.setopt(geturls_text.TIMEOUT, timeout);
                geturls_text.perform();
            elif(httpmethod=="POST"):
                geturls_text = pycurl.Curl();
                geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_2_0);
                geturls_text.setopt(geturls_text.URL, httpurl);
                geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
                geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
                geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
                geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
                geturls_text.setopt(geturls_text.TIMEOUT, timeout);
                geturls_text.setopt(geturls_text.POST, True);
                geturls_text.setopt(geturls_text.POSTFIELDS, postdata);
                geturls_text.perform();
            else:
                geturls_text = pycurl.Curl();
                geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_2_0);
                geturls_text.setopt(geturls_text.URL, httpurl);
                geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
                geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
                geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
                geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
                geturls_text.setopt(geturls_text.TIMEOUT, timeout);
                geturls_text.perform();
            retrieved_headers.seek(0);
            if(sys.version[0]=="2"):
                pycurlhead = retrieved_headers.read();
            if(sys.version[0]>="3"):
                pycurlhead = retrieved_headers.read().decode('UTF-8');
            pyhttpverinfo = re.findall(r'^HTTP/([0-9.]+) (\d+)(?: ([A-Za-z\s]+))?$', pycurlhead.splitlines()[0].strip())[0];
            pycurlheadersout = make_http_headers_from_pycurl_to_dict(pycurlhead);
            retrieved_body.seek(0);
        except socket.timeout:
            log.info("Error With URL "+httpurl);
        except socket.gaierror:
            log.info("Error With URL "+httpurl);
            log.info("Error With URL "+httpurl);
        httpcodeout = geturls_text.getinfo(geturls_text.HTTP_CODE);
        httpcodereason = http_status_to_reason(geturls_text.getinfo(geturls_text.HTTP_CODE));
        httpversionout = pyhttpverinfo[0];
        httpmethodout = httpmethod;
        httpurlout = geturls_text.getinfo(geturls_text.EFFECTIVE_URL);
        httpheaderout = pycurlheadersout;
        httpheadersentout = httpheaders;
    elif(httplibuse=="pycurl3"):
        retrieved_body = BytesIO();
        retrieved_headers = BytesIO();
        try:
            if(httpmethod=="GET"):
                geturls_text = pycurl.Curl();
                geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_3_0);
                geturls_text.setopt(geturls_text.URL, httpurl);
                geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
                geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
                geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
                geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
                geturls_text.setopt(geturls_text.TIMEOUT, timeout);
                geturls_text.perform();
            elif(httpmethod=="POST"):
                geturls_text = pycurl.Curl();
                geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_3_0);
                geturls_text.setopt(geturls_text.URL, httpurl);
                geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
                geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
                geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
                geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
                geturls_text.setopt(geturls_text.TIMEOUT, timeout);
                geturls_text.setopt(geturls_text.POST, True);
                geturls_text.setopt(geturls_text.POSTFIELDS, postdata);
                geturls_text.perform();
            else:
                geturls_text = pycurl.Curl();
                geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_3_0);
                geturls_text.setopt(geturls_text.URL, httpurl);
                geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
                geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
                geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
                geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
                geturls_text.setopt(geturls_text.TIMEOUT, timeout);
                geturls_text.perform();
            retrieved_headers.seek(0);
            if(sys.version[0]=="2"):
                pycurlhead = retrieved_headers.read();
            if(sys.version[0]>="3"):
                pycurlhead = retrieved_headers.read().decode('UTF-8');
            pyhttpverinfo = re.findall(r'^HTTP/([0-9.]+) (\d+)(?: ([A-Za-z\s]+))?$', pycurlhead.splitlines()[0].strip().rstrip('\r\n'))[0];
            pycurlheadersout = make_http_headers_from_pycurl_to_dict(pycurlhead);
            retrieved_body.seek(0);
        except socket.timeout:
            log.info("Error With URL "+httpurl);
        except socket.gaierror:
            log.info("Error With URL "+httpurl);
            log.info("Error With URL "+httpurl);
        httpcodeout = geturls_text.getinfo(geturls_text.HTTP_CODE);
        httpcodereason = http_status_to_reason(geturls_text.getinfo(geturls_text.HTTP_CODE));
        httpversionout = pyhttpverinfo[0];
        httpmethodout = httpmethod;
        httpurlout = geturls_text.getinfo(geturls_text.EFFECTIVE_URL);
        httpheaderout = pycurlheadersout;
        httpheadersentout = httpheaders;
    elif(httplibuse=="ftp"):
        geturls_text = download_file_from_ftp_file(httpurl);
        if(not geturls_text):
            return False;
        downloadsize = None;
        if(downloadsize is not None):
            downloadsize = int(downloadsize);
        if downloadsize is None: downloadsize = 0;
        log.info("Downloading URL "+httpurl);
        with BytesIO() as strbuf:
            databytes = geturls_text.read(buffersize);
            if not databytes: break;
            datasize = len(databytes);
            fulldatasize = datasize + fulldatasize;
            percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
            downloaddiff = fulldatasize - prevdownsize;
            log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
            prevdownsize = fulldatasize;
            strbuf.write(databytes);
            returnval_content = strbuf.read();
            returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size(fulldatasize, 2, "SI")}, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None};
        geturls_text.close();
    elif(httplibuse=="sftp"):
        geturls_text = download_file_from_sftp_file(httpurl);
        if(not geturls_text):
            return False;
        downloadsize = None;
        if(downloadsize is not None):
            downloadsize = int(downloadsize);
        if downloadsize is None: downloadsize = 0;
        log.info("Downloading URL "+httpurl);
        with BytesIO() as strbuf:
            databytes = geturls_text.read(buffersize);
            if not databytes: break;
            datasize = len(databytes);
            fulldatasize = datasize + fulldatasize;
            percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
            downloaddiff = fulldatasize - prevdownsize;
            log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
            prevdownsize = fulldatasize;
            strbuf.write(databytes);
            returnval_content = strbuf.read();
            returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size(fulldatasize, 2, "SI")}, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None};
        geturls_text.close();
    elif(httplibuse=="pysftp"):
        geturls_text = download_file_from_pysftp_file(httpurl);
        if(not geturls_text):
            return False;
        downloadsize = None;
        if(downloadsize is not None):
            downloadsize = int(downloadsize);
        if downloadsize is None: downloadsize = 0;
        log.info("Downloading URL "+httpurl);
        with BytesIO() as strbuf:
            databytes = geturls_text.read(buffersize);
            if not databytes: break;
            datasize = len(databytes);
            fulldatasize = datasize + fulldatasize;
            percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
            downloaddiff = fulldatasize - prevdownsize;
            log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
            prevdownsize = fulldatasize;
            strbuf.write(databytes);
            returnval_content = strbuf.read();
            returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size(fulldatasize, 2, "SI")}, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None};
        geturls_text.close();
    if(isinstance(httpheaderout, list) and (httplibuse!="pycurl" and httplibuse!="pycurl2" and httplibuse!="pycurl3")):
        httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
    if(isinstance(httpheaderout, list) and (httplibuse=="pycurl" or httplibuse=="pycurl2" or httplibuse=="pycurl3")):
        httpheaderout = dict(make_http_headers_from_pycurl_to_dict("\r\n".join(httpheaderout)));
    if(sys.version[0]=="2"):
        try:
            prehttpheaderout = httpheaderout;
            httpheaderkeys = httpheaderout.keys();
            imax = len(httpheaderkeys);
            httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
        except AttributeError:
            pass;
    httpheaderout = fix_header_names(httpheaderout);
    if(isinstance(httpheadersentout, list) and (httplibuse!="pycurl" and httplibuse!="pycurl2" and httplibuse!="pycurl3")):
        httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
    if(isinstance(httpheadersentout, list) and (httplibuse=="pycurl" or httplibuse=="pycurl2" or httplibuse=="pycurl3")):
        httpheadersentout = dict(make_http_headers_from_pycurl_to_dict("\r\n".join(httpheadersentout)));
    httpheadersentout = fix_header_names(httpheadersentout);
    log.info("Downloading URL "+httpurl);
    if(httplibuse=="urllib" or httplibuse=="request" or httplibuse=="request3" or httplibuse=="httplib" or httplibuse=="httplib2" or httplibuse=="urllib3" or httplibuse=="mechanize" or httplibuse=="httpx" or httplibuse=="httpx2" or httplibuse=="httpcore" or httplibuse=="httpcore2"):
        downloadsize = httpheaderout.get('Content-Length');
        if(downloadsize is not None):
            downloadsize = int(downloadsize);
        if downloadsize is None: downloadsize = 0;
        fulldatasize = 0;
        prevdownsize = 0;
        log.info("Downloading URL "+httpurl);
        with BytesIO() as strbuf:
            while True:
                databytes = geturls_text.read(buffersize);
                if not databytes: break;
                datasize = len(databytes);
                fulldatasize = datasize + fulldatasize;
                if(downloadsize>0):
                    percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                    downloaddiff = fulldatasize - prevdownsize;
                    log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                prevdownsize = fulldatasize;
                strbuf.write(databytes);
            strbuf.seek(0, 0);
            returnval_content = strbuf.read();
        geturls_text.close();
        if(httpheaderout.get("Content-Encoding")=="gzip"):
            try:
                returnval_content = zlib.decompress(returnval_content, 16+zlib.MAX_WBITS);
            except zlib.error:
                pass;
        elif(httpheaderout.get("Content-Encoding")=="deflate"):
            try:
                returnval_content = zlib.decompress(returnval_content);
            except zlib.error:
                pass;
        elif(httpheaderout.get("Content-Encoding")=="br" and havebrotli):
            try:
                returnval_content = brotli.decompress(returnval_content);
            except brotli.error:
                pass;
        elif(httpheaderout.get("Content-Encoding")=="zstd" and havezstd):
            try:
                returnval_content = zstandard.decompress(returnval_content);
            except zstandard.error:
                pass;
    elif(httplibuse=="requests"):
        log.info("Downloading URL "+httpurl);
        downloadsize = httpheaderout.get('Content-Length');
        if(downloadsize is not None):
            downloadsize = int(downloadsize);
        if downloadsize is None: downloadsize = 0;
        fulldatasize = 0;
        prevdownsize = 0;
        log.info("Downloading URL "+httpurl);
        with BytesIO() as strbuf:
            while True:
                databytes = geturls_text.raw.read(buffersize);
                if not databytes: break;
                datasize = len(databytes);
                fulldatasize = datasize + fulldatasize;
                if(downloadsize>0):
                    percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                    downloaddiff = fulldatasize - prevdownsize;
                    log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                prevdownsize = fulldatasize;
                strbuf.write(databytes);
            strbuf.seek(0, 0);
            returnval_content = strbuf.read();
        geturls_text.close();
        if(httpheaderout.get("Content-Encoding")=="gzip"):
            try:
                returnval_content = zlib.decompress(returnval_content, 16+zlib.MAX_WBITS);
            except zlib.error:
                pass;
        elif(httpheaderout.get("Content-Encoding")=="deflate"):
            try:
                returnval_content = zlib.decompress(returnval_content);
            except zlib.error:
                pass;
        elif(httpheaderout.get("Content-Encoding")=="br" and havebrotli):
            try:
                returnval_content = brotli.decompress(returnval_content);
            except brotli.error:
                pass;
        elif(httpheaderout.get("Content-Encoding")=="zstd" and havezstd):
            try:
                returnval_content = zstandard.decompress(returnval_content);
            except zstandard.error:
                pass;
    elif(httplibuse=="pycurl" or httplibuse=="pycurl2" or httplibuse=="pycurl3"):
        log.info("Downloading URL "+httpurl);
        downloadsize = httpheaderout.get('Content-Length');
        if(downloadsize is not None):
            downloadsize = int(downloadsize);
        if downloadsize is None: downloadsize = 0;
        fulldatasize = 0;
        prevdownsize = 0;
        log.info("Downloading URL "+httpurl);
        with BytesIO() as strbuf:
            while True:
                databytes = retrieved_body.read(buffersize);
                if not databytes: break;
                datasize = len(databytes);
                fulldatasize = datasize + fulldatasize;
                if(downloadsize>0):
                    percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                    downloaddiff = fulldatasize - prevdownsize;
                    log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                prevdownsize = fulldatasize;
                strbuf.write(databytes);
            strbuf.seek(0, 0);
            returnval_content = strbuf.read();
        geturls_text.close();
        if(httpheaderout.get("Content-Encoding")=="gzip"):
            try:
                returnval_content = zlib.decompress(returnval_content, 16+zlib.MAX_WBITS);
            except zlib.error:
                pass;
        elif(httpheaderout.get("Content-Encoding")=="deflate"):
            try:
                returnval_content = zlib.decompress(returnval_content);
            except zlib.error:
                pass;
        elif(httpheaderout.get("Content-Encoding")=="br" and havebrotli):
            try:
                returnval_content = brotli.decompress(returnval_content);
            except brotli.error:
                pass;
        elif(httpheaderout.get("Content-Encoding")=="zstd" and havezstd):
            try:
                returnval_content = zstandard.decompress(returnval_content);
            except zstandard.error:
                pass;
    elif(httplibuse=="ftp" or httplibuse=="sftp" or httplibuse=="pysftp"):
        pass;
    returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size(fulldatasize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason};
    return returnval;
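
# Illustrative sketch (assumption: this helper is not part of the original module):
# how a caller might use download_from_url() and the dict it returns. The URL and
# backend name are placeholders; any backend whose library is missing falls back
# to "urllib" as handled above.
def _example_download_from_url_usage():
    result = download_from_url("http://example.com/", httplibuse="urllib");
    if(not result):
        return False;
    # 'Content' holds the (already decompressed) body as bytes; 'Headers', 'Code'
    # and 'Reason' mirror the response metadata collected above.
    log.info("Got "+str(result['Contentsize'])+" bytes with status "+str(result['Code']));
    return result['Content'];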
def download_from_url_file(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix, haverequests, havemechanize, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp;
    exec_time_start = time.time();
    myhash = hashlib.new("sha1");
    if(sys.version[0]=="2"):
        myhash.update(httpurl);
        myhash.update(str(buffersize));
        myhash.update(str(exec_time_start));
    if(sys.version[0]>="3"):
        myhash.update(httpurl.encode('utf-8'));
        myhash.update(str(buffersize).encode('utf-8'));
        myhash.update(str(exec_time_start).encode('utf-8'));
    newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest());
    if(sleep<0):
        sleep = geturls_download_sleep;
    if(timeout<=0):
        timeout = 10;
    if(httplibuse=="urllib1" or httplibuse=="urllib2" or httplibuse=="request"):
        httplibuse = "urllib";
    if(httplibuse=="httplib1"):
        httplibuse = "httplib";
    if(not haverequests and httplibuse=="requests"):
        httplibuse = "urllib";
    if(not havehttpx and httplibuse=="httpx"):
        httplibuse = "urllib";
    if(not havehttpx and httplibuse=="httpx2"):
        httplibuse = "urllib";
    if(not havehttpcore and httplibuse=="httpcore"):
        httplibuse = "urllib";
    if(not havehttpcore and httplibuse=="httpcore2"):
        httplibuse = "urllib";
    if(not havemechanize and httplibuse=="mechanize"):
        httplibuse = "urllib";
    if(not havepycurl and httplibuse=="pycurl"):
        httplibuse = "urllib";
    if(not havepycurl and httplibuse=="pycurl2"):
        httplibuse = "urllib";
    if(havepycurl and httplibuse=="pycurl2" and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
        httplibuse = "pycurl";
    if(not havepycurl and httplibuse=="pycurl3"):
        httplibuse = "urllib";
    if(havepycurl and httplibuse=="pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
        httplibuse = "pycurl2";
    if(havepycurl and httplibuse=="pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
        httplibuse = "pycurl";
    if(not havehttplib2 and httplibuse=="httplib2"):
        httplibuse = "httplib";
    if(not haveparamiko and httplibuse=="sftp"):
        return False;
    if(not haveparamiko and httplibuse=="pysftp"):
        return False;
    pretmpfilename = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, httplibuse, buffersize, sleep, timeout);
    if(not pretmpfilename):
        return False;
    with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
        tmpfilename = f.name;
        try:
            os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())));
        except AttributeError:
            try:
                os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
            except ValueError:
                pass;
        except ValueError:
            pass;
        returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason')};
        f.write(pretmpfilename['Content']);
    exec_time_end = time.time();
    # elapsed time is end minus start
    log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to download file.");
    returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size(os.path.getsize(tmpfilename), 2, "SI")}, 'DownloadTime': float(exec_time_end - exec_time_start), 'DownloadTimeReadable': hms_string(exec_time_end - exec_time_start)});
    return returnval;
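
# Illustrative sketch (assumption: this helper is not part of the original module):
# download_from_url_file() stores the body in a NamedTemporaryFile and reports its
# path under 'Filename'. The URL below is a placeholder.
def _example_download_from_url_file_usage():
    result = download_from_url_file("http://example.com/", httplibuse="urllib");
    if(not result):
        return False;
    # The temporary file persists (delete=False); the caller removes it when done.
    log.info("Saved to "+result['Filename']+" ("+result['FilesizeAlt']['SI']['ReadableWithSuffix']+")");
    return result['Filename'];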
def download_from_url_to_file(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    global geturls_download_sleep, havezstd, havebrotli, haverequests, havemechanize, havepycurl, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp;
    if(sleep<0):
        sleep = geturls_download_sleep;
    if(timeout<=0):
        timeout = 10;
    if(httplibuse=="urllib1" or httplibuse=="urllib2" or httplibuse=="request"):
        httplibuse = "urllib";
    if(httplibuse=="httplib1"):
        httplibuse = "httplib";
    if(not haverequests and httplibuse=="requests"):
        httplibuse = "urllib";
    if(not havehttpx and httplibuse=="httpx"):
        httplibuse = "urllib";
    if(not havehttpx and httplibuse=="httpx2"):
        httplibuse = "urllib";
    if(not havehttpcore and httplibuse=="httpcore"):
        httplibuse = "urllib";
    if(not havehttpcore and httplibuse=="httpcore2"):
        httplibuse = "urllib";
    if(not havemechanize and httplibuse=="mechanize"):
        httplibuse = "urllib";
    if(not havepycurl and httplibuse=="pycurl"):
        httplibuse = "urllib";
    if(not havepycurl and httplibuse=="pycurl2"):
        httplibuse = "urllib";
    if(havepycurl and httplibuse=="pycurl2" and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
        httplibuse = "pycurl";
    if(not havepycurl and httplibuse=="pycurl3"):
        httplibuse = "urllib";
    if(havepycurl and httplibuse=="pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
        httplibuse = "pycurl2";
    if(havepycurl and httplibuse=="pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
        httplibuse = "pycurl";
    if(not havehttplib2 and httplibuse=="httplib2"):
        httplibuse = "httplib";
    if(not haveparamiko and httplibuse=="sftp"):
        return False;
    if(not havepysftp and httplibuse=="pysftp"):
        return False;
    if(not outfile=="-"):
        outpath = outpath.rstrip(os.path.sep);
        filepath = os.path.realpath(outpath+os.path.sep+outfile);
        if(not os.path.exists(outpath)):
            os.makedirs(outpath);
        if(os.path.exists(outpath) and os.path.isfile(outpath)):
            return False;
        if(os.path.exists(filepath) and os.path.isdir(filepath)):
            return False;
        pretmpfilename = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, httplibuse, ranges, buffersize[0], sleep, timeout);
        if(not pretmpfilename):
            return False;
        tmpfilename = pretmpfilename['Filename'];
        downloadsize = int(os.path.getsize(tmpfilename));
        fulldatasize = 0;
        log.info("Moving file "+tmpfilename+" to "+filepath);
        exec_time_start = time.time();
        shutil.move(tmpfilename, filepath);
        try:
            os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())));
        except AttributeError:
            try:
                os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
            except ValueError:
                pass;
        except ValueError:
            pass;
        exec_time_end = time.time();
        log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to move file.");
        if(os.path.exists(tmpfilename)):
            os.remove(tmpfilename);
        returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code'], 'Reason': pretmpfilename['Reason']};
    if(outfile=="-"):
        pretmpfilename = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, httplibuse, ranges, buffersize[0], sleep, timeout);
        tmpfilename = pretmpfilename['Filename'];
        downloadsize = int(os.path.getsize(tmpfilename));
        fulldatasize = 0;
        prevdownsize = 0;
        exec_time_start = time.time();
        with open(tmpfilename, 'rb') as ft:
            f = BytesIO();
            while True:
                databytes = ft.read(buffersize[1]);
                if not databytes: break;
                datasize = len(databytes);
                fulldatasize = datasize + fulldatasize;
                if(downloadsize>0):
                    percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                    downloaddiff = fulldatasize - prevdownsize;
                    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                prevdownsize = fulldatasize;
                f.write(databytes);
            f.seek(0, 0);
            fdata = f.getvalue();
            f.close();
        os.remove(tmpfilename);
        exec_time_end = time.time();
        log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to copy file.");
        returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code'], 'Reason': pretmpfilename['Reason']};
    return returnval;
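
# Illustrative sketch (assumption: this helper is not part of the original module):
# saving straight to a named output file with download_from_url_to_file(); passing
# outfile="-" instead returns the content in memory, as the branch above shows.
# The URL and file name are placeholders.
def _example_download_from_url_to_file_usage():
    result = download_from_url_to_file("http://example.com/", httplibuse="urllib",
                                       outfile="example.html", outpath=os.getcwd());
    if(not result):
        return False;
    log.info("Wrote "+result['Filename']+" in "+result['DownloadTimeReadable']);
    return result['Filename'];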
def download_from_url_with_urllib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib", buffersize, sleep, timeout);
    return returnval;

def download_from_url_with_request(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib", buffersize, sleep, timeout);
    return returnval;

def download_from_url_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "request3", buffersize, sleep, timeout);
    return returnval;

def download_from_url_with_httplib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httplib", buffersize, sleep, timeout);
    return returnval;

def download_from_url_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httplib2", buffersize, sleep, timeout);
    return returnval;

def download_from_url_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib3", buffersize, sleep, timeout);
    return returnval;

def download_from_url_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "requests", buffersize, sleep, timeout);
    return returnval;

def download_from_url_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpx", buffersize, sleep, timeout);
    return returnval;

def download_from_url_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpx2", buffersize, sleep, timeout);
    return returnval;

def download_from_url_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpcore", buffersize, sleep, timeout);
    return returnval;

def download_from_url_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpcore2", buffersize, sleep, timeout);
    return returnval;

def download_from_url_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "mechanize", buffersize, sleep, timeout);
    return returnval;

def download_from_url_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl", buffersize, sleep, timeout);
    return returnval;

def download_from_url_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl2", buffersize, sleep, timeout);
    return returnval;

def download_from_url_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl3", buffersize, sleep, timeout);
    return returnval;

def download_from_url_with_ftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "ftp", buffersize, sleep, timeout);
    return returnval;

def download_from_url_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "sftp", buffersize, sleep, timeout);
    return returnval;

def download_from_url_with_pysftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pysftp", buffersize, sleep, timeout);
    return returnval;
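
# Illustrative note (assumption: this helper is not part of the original module):
# each download_from_url_with_<backend>() wrapper simply pins the httplibuse
# argument, so the two calls below are equivalent.
def _example_backend_wrapper_usage():
    via_wrapper = download_from_url_with_requests("http://example.com/");
    via_argument = download_from_url("http://example.com/", httplibuse="requests");
    return (via_wrapper, via_argument);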
def download_from_url_file_with_urllib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib", ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_file_with_request(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib", ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_file_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "request3", ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_file_with_httplib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httplib", ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_file_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httplib2", ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_file_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib3", ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_file_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "requests", ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_file_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpx", ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_file_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpx2", ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_file_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpcore", ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_file_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpcore2", ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_file_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "mechanize", ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_file_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl", ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_file_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl2", ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_file_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl3", ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_file_with_ftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "ftp", ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_file_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "sftp", ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_file_with_pysftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pysftp", ranges, buffersize, sleep, timeout);
    return returnval;
def download_from_url_to_file_with_urllib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_to_file_with_request(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "request", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_to_file_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_to_file_with_httplib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httplib", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_to_file_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httplib2", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_to_file_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib3", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_to_file_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "requests", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_to_file_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpx", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_to_file_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpx2", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_to_file_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpcore", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_to_file_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpcore2", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_to_file_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "mechanize", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_to_file_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_to_file_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl2", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_to_file_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl3", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_to_file_with_ftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "ftp", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_to_file_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "sftp", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;

def download_from_url_to_file_with_pysftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pysftp", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;
def download_file_from_ftp_file(url):
    urlparts = urlparse.urlparse(url);
    file_name = os.path.basename(urlparts.path);
    file_dir = os.path.dirname(urlparts.path);
    if(urlparts.username is not None):
        ftp_username = urlparts.username;
    else:
        ftp_username = "anonymous";
    if(urlparts.password is not None):
        ftp_password = urlparts.password;
    elif(urlparts.password is None and urlparts.username=="anonymous"):
        ftp_password = "anonymous";
    else:
        ftp_password = "";
    if(urlparts.scheme=="ftp"):
        ftp = FTP();
    elif(urlparts.scheme=="ftps"):
        ftp = FTP_TLS();
    else:
        return False;
    if(urlparts.scheme=="http" or urlparts.scheme=="https"):
        return False;
    ftp_port = urlparts.port;
    if(urlparts.port is None):
        ftp_port = 21;
    try:
        ftp.connect(urlparts.hostname, ftp_port);
    except socket.gaierror:
        log.info("Error With URL "+url);
        return False;
    except socket.timeout:
        log.info("Error With URL "+url);
        return False;
    ftp.login(urlparts.username, urlparts.password);
    if(urlparts.scheme=="ftps"):
        ftp.prot_p();
    ftpfile = BytesIO();
    ftp.retrbinary("RETR "+urlparts.path, ftpfile.write);
    #ftp.storbinary("STOR "+urlparts.path, ftpfile.write);
    ftp.close();
    ftpfile.seek(0, 0);
    return ftpfile;

def download_file_from_ftp_string(url):
    ftpfile = download_file_from_ftp_file(url);
    return ftpfile.read();
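
# Illustrative sketch (assumption: this helper is not part of the original module):
# the FTP helpers accept a full URL, falling back to anonymous login when no
# credentials are embedded, and return either a BytesIO object or raw bytes.
# The host and path below are placeholders.
def _example_ftp_download_usage():
    ftp_bytes = download_file_from_ftp_string("ftp://ftp.example.com/pub/file.txt");
    return ftp_bytes;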
def upload_file_to_ftp_file(ftpfile, url):
    urlparts = urlparse.urlparse(url);
    file_name = os.path.basename(urlparts.path);
    file_dir = os.path.dirname(urlparts.path);
    if(urlparts.username is not None):
        ftp_username = urlparts.username;
    else:
        ftp_username = "anonymous";
    if(urlparts.password is not None):
        ftp_password = urlparts.password;
    elif(urlparts.password is None and urlparts.username=="anonymous"):
        ftp_password = "anonymous";
    else:
        ftp_password = "";
    if(urlparts.scheme=="ftp"):
        ftp = FTP();
    elif(urlparts.scheme=="ftps"):
        ftp = FTP_TLS();
    else:
        return False;
    if(urlparts.scheme=="http" or urlparts.scheme=="https"):
        return False;
    ftp_port = urlparts.port;
    if(urlparts.port is None):
        ftp_port = 21;
    try:
        ftp.connect(urlparts.hostname, ftp_port);
    except socket.gaierror:
        log.info("Error With URL "+url);
        return False;
    except socket.timeout:
        log.info("Error With URL "+url);
        return False;
    ftp.login(urlparts.username, urlparts.password);
    if(urlparts.scheme=="ftps"):
        ftp.prot_p();
    ftp.storbinary("STOR "+urlparts.path, ftpfile);
    ftp.close();
    ftpfile.seek(0, 0);
    return ftpfile;

def upload_file_to_ftp_string(ftpstring, url):
    ftpfileo = BytesIO(ftpstring);
    ftpfile = upload_file_to_ftp_file(ftpfileo, url);
    return ftpfile;
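
# Illustrative sketch (assumption: this helper is not part of the original module):
# uploading an in-memory byte string with the FTP helpers above. The URL is a
# placeholder; upload_file_to_ftp_string() wraps the bytes in a BytesIO and
# upload_file_to_ftp_file() issues a STOR for the URL's path.
def _example_ftp_upload_usage():
    payload = b"hello world";
    return upload_file_to_ftp_string(payload, "ftp://user:pass@ftp.example.com/incoming/hello.txt");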
if(haveparamiko):
    def download_file_from_sftp_file(url):
        urlparts = urlparse.urlparse(url);
        file_name = os.path.basename(urlparts.path);
        file_dir = os.path.dirname(urlparts.path);
        if(urlparts.scheme=="http" or urlparts.scheme=="https"):
            return False;
        sftp_port = urlparts.port;
        if(urlparts.port is None):
            sftp_port = 22;
        else:
            sftp_port = urlparts.port;
        if(urlparts.username is not None):
            sftp_username = urlparts.username;
        else:
            sftp_username = "anonymous";
        if(urlparts.password is not None):
            sftp_password = urlparts.password;
        elif(urlparts.password is None and urlparts.username=="anonymous"):
            sftp_password = "anonymous";
        else:
            sftp_password = "";
        if(urlparts.scheme!="sftp"):
            return False;
        ssh = paramiko.SSHClient();
        ssh.load_system_host_keys();
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy());
        try:
            ssh.connect(urlparts.hostname, port=sftp_port, username=urlparts.username, password=urlparts.password);
        except paramiko.ssh_exception.SSHException:
            return False;
        except socket.gaierror:
            log.info("Error With URL "+url);
            return False;
        except socket.timeout:
            log.info("Error With URL "+url);
            return False;
        sftp = ssh.open_sftp();
        sftpfile = BytesIO();
        sftp.getfo(urlparts.path, sftpfile);
        sftp.close();
        ssh.close();
        sftpfile.seek(0, 0);
        return sftpfile;
if(not haveparamiko):
    # Fallback stub when paramiko is unavailable.
    def download_file_from_sftp_file(url):
        return False;

if(haveparamiko):
    def download_file_from_sftp_string(url):
        sftpfile = download_file_from_sftp_file(url);
        return sftpfile.read();
if(not haveparamiko):
    def download_file_from_sftp_string(url):
        return False;
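
# Illustrative sketch (assumption: this helper is not part of the original module):
# the SFTP helpers require paramiko; when it is missing the stubs defined above
# simply return False. The URL below is a placeholder.
def _example_sftp_download_usage():
    sftpfo = download_file_from_sftp_file("sftp://user:pass@sftp.example.com/path/file.txt");
    if(not sftpfo):
        return False;
    return sftpfo.read();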
if(haveparamiko):
    def upload_file_to_sftp_file(sftpfile, url):
        urlparts = urlparse.urlparse(url);
        file_name = os.path.basename(urlparts.path);
        file_dir = os.path.dirname(urlparts.path);
        sftp_port = urlparts.port;
        if(urlparts.scheme=="http" or urlparts.scheme=="https"):
            return False;
        if(urlparts.port is None):
            sftp_port = 22;
        else:
            sftp_port = urlparts.port;
        if(urlparts.username is not None):
            sftp_username = urlparts.username;
        else:
            sftp_username = "anonymous";
        if(urlparts.password is not None):
            sftp_password = urlparts.password;
        elif(urlparts.password is None and urlparts.username=="anonymous"):
            sftp_password = "anonymous";
        else:
            sftp_password = "";
        if(urlparts.scheme!="sftp"):
            return False;
        ssh = paramiko.SSHClient();
        ssh.load_system_host_keys();
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy());
        try:
            ssh.connect(urlparts.hostname, port=sftp_port, username=urlparts.username, password=urlparts.password);
        except paramiko.ssh_exception.SSHException:
            return False;
        except socket.gaierror:
            log.info("Error With URL "+url);
            return False;
        except socket.timeout:
            log.info("Error With URL "+url);
            return False;
        sftp = ssh.open_sftp();
        sftp.putfo(sftpfile, urlparts.path);
        sftp.close();
        ssh.close();
        sftpfile.seek(0, 0);
        return sftpfile;
if(not haveparamiko):
    # Fallback stub when paramiko is unavailable.
    def upload_file_to_sftp_file(sftpfile, url):
        return False;

if(haveparamiko):
    def upload_file_to_sftp_string(sftpstring, url):
        sftpfileo = BytesIO(sftpstring);
        sftpfile = upload_file_to_sftp_file(sftpfileo, url);
        return sftpfile;
if(not haveparamiko):
    def upload_file_to_sftp_string(url):
        return False;
if(havepysftp):
    def download_file_from_pysftp_file(url):
        urlparts = urlparse.urlparse(url);
        file_name = os.path.basename(urlparts.path);
        file_dir = os.path.dirname(urlparts.path);
        if(urlparts.scheme=="http" or urlparts.scheme=="https"):
            return False;
        sftp_port = urlparts.port;
        if(urlparts.port is None):
            sftp_port = 22;
        else:
            sftp_port = urlparts.port;
        if(urlparts.username is not None):
            sftp_username = urlparts.username;
        else:
            sftp_username = "anonymous";
        if(urlparts.password is not None):
            sftp_password = urlparts.password;
        elif(urlparts.password is None and urlparts.username=="anonymous"):
            sftp_password = "anonymous";
        else:
            sftp_password = "";
        if(urlparts.scheme!="sftp"):
            return False;
        try:
            # Keep the pysftp connection handle; the original dropped it and then
            # referenced an undefined ssh object.
            sftp = pysftp.Connection(urlparts.hostname, port=sftp_port, username=urlparts.username, password=urlparts.password);
        except paramiko.ssh_exception.SSHException:
            return False;
        except socket.gaierror:
            log.info("Error With URL "+url);
            return False;
        except socket.timeout:
            log.info("Error With URL "+url);
            return False;
        sftpfile = BytesIO();
        sftp.getfo(urlparts.path, sftpfile);
        sftp.close();
        sftpfile.seek(0, 0);
        return sftpfile;
if(not havepysftp):
    # Fallback stub when pysftp is unavailable.
    def download_file_from_pysftp_file(url):
        return False;

if(havepysftp):
    def download_file_from_pysftp_string(url):
        sftpfile = download_file_from_pysftp_file(url);
        return sftpfile.read();
if(not havepysftp):
    def download_file_from_pysftp_string(url):
        return False;

if(havepysftp):
    def upload_file_to_pysftp_file(sftpfile, url):
        urlparts = urlparse.urlparse(url);
        file_name = os.path.basename(urlparts.path);
        file_dir = os.path.dirname(urlparts.path);
        sftp_port = urlparts.port;
        if(urlparts.scheme=="http" or urlparts.scheme=="https"):
            return False;
        if(urlparts.port is None):
            sftp_port = 22;
        else:
            sftp_port = urlparts.port;
        if(urlparts.username is not None):
            sftp_username = urlparts.username;
        else:
            sftp_username = "anonymous";
        if(urlparts.password is not None):
            sftp_password = urlparts.password;
        elif(urlparts.password is None and urlparts.username=="anonymous"):
            sftp_password = "anonymous";
        else:
            sftp_password = "";
        if(urlparts.scheme!="sftp"):
            return False;
        try:
            sftp = pysftp.Connection(urlparts.hostname, port=sftp_port, username=urlparts.username, password=urlparts.password);
        except paramiko.ssh_exception.SSHException:
            return False;
        except socket.gaierror:
            log.info("Error With URL "+url);
            return False;
        except socket.timeout:
            log.info("Error With URL "+url);
            return False;
        sftp.putfo(sftpfile, urlparts.path);
        sftp.close();
        sftpfile.seek(0, 0);
        return sftpfile;
if(not havepysftp):
    # Fallback stub when pysftp is unavailable.
    def upload_file_to_pysftp_file(sftpfile, url):
        return False;

if(havepysftp):
    def upload_file_to_pysftp_string(sftpstring, url):
        sftpfileo = BytesIO(sftpstring);
        sftpfile = upload_file_to_pysftp_file(sftpfileo, url);
        return sftpfile;
def upload_file_to_pysftp_string(url):