#!/usr/bin/env python

'''
This program is free software; you can redistribute it and/or modify
it under the terms of the Revised BSD License.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
Revised BSD License for more details.

Copyright 2016-2023 Game Maker 2k - https://github.com/GameMaker2k
Copyright 2016-2023 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

$FileInfo: pywwwget.py - Last Update: 10/5/2023 Ver. 2.0.2 RC 1 - Author: cooldude2k $
'''
from __future__ import division, absolute_import, print_function;
import re, os, sys, hashlib, shutil, platform, tempfile, urllib, zlib, time, argparse, cgi, subprocess, socket, email.utils, datetime;
import logging as log;
from ftplib import FTP, FTP_TLS;
from base64 import b64encode;
haverequests = False;
try:
    import requests;
    haverequests = True;
except ImportError:
    haverequests = False;
havemechanize = False;
try:
    import mechanize;
    havemechanize = True;
except ImportError:
    havemechanize = False;
havepycurl = False;
try:
    import pycurl;
    havepycurl = True;
except ImportError:
    havepycurl = False;
haveparamiko = False;
try:
    import paramiko;
    haveparamiko = True;
except ImportError:
    haveparamiko = False;
havepysftp = False;
try:
    import pysftp;
    havepysftp = True;
except ImportError:
    havepysftp = False;
haveurllib3 = False;
try:
    import urllib3;
    haveurllib3 = True;
except ImportError:
    haveurllib3 = False;
havehttplib2 = False;
try:
    from httplib2 import HTTPConnectionWithTimeout, HTTPSConnectionWithTimeout;
    havehttplib2 = True;
except ImportError:
    havehttplib2 = False;
havehttpx = False;
try:
    import httpx;
    havehttpx = True;
except ImportError:
    havehttpx = False;
havehttpcore = False;
try:
    import httpcore;
    havehttpcore = True;
except ImportError:
    havehttpcore = False;
havebrotli = False;
try:
    import brotli;
    havebrotli = True;
except ImportError:
    havebrotli = False;
havezstd = False;
try:
    import zstandard;
    havezstd = True;
except ImportError:
    havezstd = False;
if(sys.version[0]=="2"):
    try:
        from io import StringIO, BytesIO;
    except ImportError:
        try:
            from cStringIO import StringIO;
            from cStringIO import StringIO as BytesIO;
        except ImportError:
            from StringIO import StringIO;
            from StringIO import StringIO as BytesIO;
    # From http://python-future.org/compatible_idioms.html
    from urlparse import urlparse, urlunparse, urlsplit, urlunsplit, urljoin;
    from urllib import urlencode;
    from urllib import urlopen as urlopenalt;
    from urllib2 import urlopen, Request, install_opener, HTTPError, URLError, build_opener, HTTPCookieProcessor;
    import urlparse, cookielib;
    from httplib import HTTPConnection, HTTPSConnection;
if(sys.version[0]>="3"):
    from io import StringIO, BytesIO;
    # From http://python-future.org/compatible_idioms.html
    from urllib.parse import urlparse, urlunparse, urlsplit, urlunsplit, urljoin, urlencode;
    from urllib.request import urlopen, Request, install_opener, build_opener, HTTPCookieProcessor;
    from urllib.error import HTTPError, URLError;
    import urllib.parse as urlparse;
    import http.cookiejar as cookielib;
    from http.client import HTTPConnection, HTTPSConnection;
__program_name__ = "PyWWW-Get";
__program_alt_name__ = "PyWWWGet";
__program_small_name__ = "wwwget";
__project__ = __program_name__;
__project_url__ = "https://github.com/GameMaker2k/PyWWW-Get";
__version_info__ = (2, 0, 2, "RC 1", 1);
__version_date_info__ = (2023, 10, 5, "RC 1", 1);
__version_date__ = str(__version_date_info__[0])+"."+str(__version_date_info__[1]).zfill(2)+"."+str(__version_date_info__[2]).zfill(2);
__revision__ = __version_info__[3];
__revision_id__ = "$Id$";
if(__version_info__[4] is not None):
    __version_date_plusrc__ = __version_date__+"-"+str(__version_date_info__[4]);
if(__version_info__[4] is None):
    __version_date_plusrc__ = __version_date__;
if(__version_info__[3] is not None):
    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])+" "+str(__version_info__[3]);
if(__version_info__[3] is None):
    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]);

tmpfileprefix = "py"+str(sys.version_info[0])+__program_small_name__+str(__version_info__[0])+"-";
tmpfilesuffix = "-";
pytempdir = tempfile.gettempdir();
# platform.architecture() returns a tuple like ('64bit', 'ELF'); the bitness string is its first element
PyBitness = platform.architecture()[0];
if(PyBitness=="32bit" or PyBitness=="32"):
    PyBitness = "32";
elif(PyBitness=="64bit" or PyBitness=="64"):
    PyBitness = "64";
else:
    PyBitness = "32";
compression_supported = "gzip, deflate";
if(havebrotli and not havezstd):
    compression_supported = "gzip, deflate, br";
elif(not havebrotli and havezstd):
    compression_supported = "gzip, deflate, zstd";
elif(havebrotli and havezstd):
    compression_supported = "gzip, deflate, zstd, br";
else:
    compression_supported = "gzip, deflate";
geturls_cj = cookielib.CookieJar();
windowsNT4_ua_string = "Windows NT 4.0";
windowsNT4_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "32", 'SEC-CH-UA-PLATFORM-VERSION': "4.0.0"};
windows2k_ua_string = "Windows NT 5.0";
windows2k_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "32", 'SEC-CH-UA-PLATFORM-VERSION': "5.0.0"};
windowsXP_ua_string = "Windows NT 5.1";
windowsXP_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "32", 'SEC-CH-UA-PLATFORM-VERSION': "5.1.0"};
windowsXP64_ua_string = "Windows NT 5.2; Win64; x64";
windowsXP64_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "5.1.0"};
windows7_ua_string = "Windows NT 6.1; Win64; x64";
windows7_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "6.1.0"};
windows8_ua_string = "Windows NT 6.2; Win64; x64";
windows8_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "6.2.0"};
windows81_ua_string = "Windows NT 6.3; Win64; x64";
windows81_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "6.3.0"};
windows10_ua_string = "Windows NT 10.0; Win64; x64";
windows10_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "10.0.0"};
windows11_ua_string = "Windows NT 11.0; Win64; x64";
windows11_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "11.0.0"};
geturls_ua_firefox_windows7 = "Mozilla/5.0 ("+windows7_ua_string+"; rv:109.0) Gecko/20100101 Firefox/117.0";
geturls_ua_seamonkey_windows7 = "Mozilla/5.0 ("+windows7_ua_string+"; rv:91.0) Gecko/20100101 Firefox/91.0 SeaMonkey/2.53.17";
geturls_ua_chrome_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36";
geturls_ua_chromium_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chromium/117.0.0.0 Chrome/117.0.0.0 Safari/537.36";
geturls_ua_palemoon_windows7 = "Mozilla/5.0 ("+windows7_ua_string+"; rv:102.0) Gecko/20100101 Goanna/6.3 Firefox/102.0 PaleMoon/32.4.0.1";
geturls_ua_opera_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36 OPR/102.0.0.0";
geturls_ua_vivaldi_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36 Vivaldi/6.2.3105.48";
geturls_ua_internet_explorer_windows7 = "Mozilla/5.0 ("+windows7_ua_string+"; Trident/7.0; rv:11.0) like Gecko";
geturls_ua_microsoft_edge_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36 Edg/117.0.2045.31";
geturls_ua_pywwwget_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prourl})".format(proname=__project__, prover=__version__, prourl=__project_url__);
if(platform.python_implementation()!=""):
    py_implementation = platform.python_implementation();
if(platform.python_implementation()==""):
    py_implementation = "Python";
geturls_ua_pywwwget_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system()+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__);
geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)";
geturls_ua_googlebot_google_old = "Googlebot/2.1 (+http://www.google.com/bot.html)";
geturls_ua = geturls_ua_firefox_windows7;
geturls_headers_firefox_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_firefox_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
geturls_headers_seamonkey_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_seamonkey_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
geturls_headers_chrome_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_chrome_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Google Chrome\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Chromium\";v=\"117\"", 'SEC-CH-UA-FULL-VERSION': "117.0.5938.63"};
geturls_headers_chrome_windows7.update(windows7_ua_addon);
geturls_headers_chromium_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_chromium_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Chromium\";v=\"117\", \"Not;A=Brand\";v=\"24\"", 'SEC-CH-UA-FULL-VERSION': "117.0.5938.63"};
geturls_headers_chromium_windows7.update(windows7_ua_addon);
geturls_headers_palemoon_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_palemoon_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
geturls_headers_opera_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_opera_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Chromium\";v=\"116\", \"Not;A=Brand\";v=\"8\", \"Opera\";v=\"102\"", 'SEC-CH-UA-FULL-VERSION': "102.0.4880.56"};
geturls_headers_opera_windows7.update(windows7_ua_addon);
geturls_headers_vivaldi_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_vivaldi_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Google Chrome\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Vivaldi\";v=\"6.2\"", 'SEC-CH-UA-FULL-VERSION': "6.2.3105.48"};
geturls_headers_vivaldi_windows7.update(windows7_ua_addon);
geturls_headers_internet_explorer_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_internet_explorer_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
geturls_headers_microsoft_edge_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_microsoft_edge_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Microsoft Edge\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Chromium\";v=\"117\"", 'SEC-CH-UA-FULL-VERSION': "117.0.2045.31"};
geturls_headers_microsoft_edge_windows7.update(windows7_ua_addon);
geturls_headers_pywwwget_python = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_pywwwget_python, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\""+__project__+"\";v=\""+str(__version__)+"\", \"Not;A=Brand\";v=\"8\", \""+py_implementation+"\";v=\""+str(platform.release())+"\"", 'SEC-CH-UA-FULL-VERSION': str(__version__), 'SEC-CH-UA-PLATFORM': ""+py_implementation+"", 'SEC-CH-UA-ARCH': ""+platform.machine()+"", 'SEC-CH-UA-PLATFORM-VERSION': str(__version__), 'SEC-CH-UA-BITNESS': str(PyBitness)};
geturls_headers_pywwwget_python_alt = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_pywwwget_python_alt, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\""+__project__+"\";v=\""+str(__version__)+"\", \"Not;A=Brand\";v=\"8\", \""+py_implementation+"\";v=\""+str(platform.release())+"\"", 'SEC-CH-UA-FULL-VERSION': str(__version__), 'SEC-CH-UA-PLATFORM': ""+py_implementation+"", 'SEC-CH-UA-ARCH': ""+platform.machine()+"", 'SEC-CH-UA-PLATFORM-VERSION': str(__version__), 'SEC-CH-UA-BITNESS': str(PyBitness)};
geturls_headers_googlebot_google = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_googlebot_google, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
geturls_headers_googlebot_google_old = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_googlebot_google_old, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
geturls_headers = geturls_headers_firefox_windows7;
geturls_download_sleep = 0;
def verbose_printout(dbgtxt, outtype="log", dbgenable=True, dgblevel=20):
    # the logging module is imported above as "log"
    if(outtype=="print" and dbgenable):
        print(dbgtxt);
        return True;
    elif(outtype=="log" and dbgenable):
        log.info(dbgtxt);
        return True;
    elif(outtype=="warning" and dbgenable):
        log.warning(dbgtxt);
        return True;
    elif(outtype=="error" and dbgenable):
        log.error(dbgtxt);
        return True;
    elif(outtype=="critical" and dbgenable):
        log.critical(dbgtxt);
        return True;
    elif(outtype=="exception" and dbgenable):
        log.exception(dbgtxt);
        return True;
    elif(outtype=="logalt" and dbgenable):
        log.log(dgblevel, dbgtxt);
        return True;
    elif(outtype=="debug" and dbgenable):
        log.debug(dbgtxt);
        return True;
    elif(not dbgenable):
        return True;
    else:
        return False;
    return False;
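
# Example usage (illustrative):
#   verbose_printout("fetching page", outtype="print");  # prints and returns True
#   verbose_printout("fetching page", outtype="log");    # logs at INFO level
#   verbose_printout("fetching page", outtype="bogus");  # unknown outtype returns False
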
def verbose_printout_return(dbgtxt, outtype="log", dbgenable=True, dgblevel=20):
    dbgout = verbose_printout(dbgtxt, outtype, dbgenable, dgblevel);
    if(not dbgout):
        return False;
    return dbgtxt;

def add_url_param(url, **params):
    n=3;
    parts = list(urlparse.urlsplit(url));
    d = dict(urlparse.parse_qsl(parts[n]));  # use urlparse.parse_qs for list values; cgi.parse_qsl was removed in Python 3.8
    d.update(params);
    parts[n]=urlencode(d);
    return urlparse.urlunsplit(parts);
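
# A quick sketch of what add_url_param produces (illustrative; query order may vary):
#   add_url_param("http://example.com/page?a=1", b="2")
#   -> "http://example.com/page?a=1&b=2"
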
os.environ["PATH"] = os.environ["PATH"] + os.pathsep + os.path.dirname(os.path.realpath(__file__)) + os.pathsep + os.getcwd();
def which_exec(execfile):
    # split on os.pathsep so Windows-style PATH entries work too
    for path in os.environ["PATH"].split(os.pathsep):
        if os.path.exists(os.path.join(path, execfile)):
            return os.path.join(path, execfile);
    return None;

def listize(varlist):
    il = 0;
    ix = len(varlist);
    ilx = 1;
    newlistreg = {};
    newlistrev = {};
    newlistfull = {};
    while(il < ix):
        newlistreg.update({ilx: varlist[il]});
        newlistrev.update({varlist[il]: ilx});
        ilx = ilx + 1;
        il = il + 1;
    newlistfull = {1: newlistreg, 2: newlistrev, 'reg': newlistreg, 'rev': newlistrev};
    return newlistfull;
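
# listize builds 1-based forward and reverse lookup tables (illustrative):
#   listize(["a", "b"])
#   -> {1: {1: 'a', 2: 'b'}, 2: {'a': 1, 'b': 2}, 'reg': {1: 'a', 2: 'b'}, 'rev': {'a': 1, 'b': 2}}
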
def twolistize(varlist):
    il = 0;
    ix = len(varlist);
    ilx = 1;
    newlistnamereg = {};
    newlistnamerev = {};
    newlistdescreg = {};
    newlistdescrev = {};
    newlistfull = {};
    while(il < ix):
        newlistnamereg.update({ilx: varlist[il][0].strip()});
        newlistnamerev.update({varlist[il][0].strip(): ilx});
        newlistdescreg.update({ilx: varlist[il][1].strip()});
        newlistdescrev.update({varlist[il][1].strip(): ilx});
        ilx = ilx + 1;
        il = il + 1;
    newlistnametmp = {1: newlistnamereg, 2: newlistnamerev, 'reg': newlistnamereg, 'rev': newlistnamerev};
    newlistdesctmp = {1: newlistdescreg, 2: newlistdescrev, 'reg': newlistdescreg, 'rev': newlistdescrev};
    newlistfull = {1: newlistnametmp, 2: newlistdesctmp, 'name': newlistnametmp, 'desc': newlistdesctmp};
    return newlistfull;

def arglistize(proexec, *varlist):
    il = 0;
    ix = len(varlist);
    ilx = 1;
    newarglist = [proexec];
    while(il < ix):
        if varlist[il][0] is not None:
            newarglist.append(varlist[il][0]);
        if varlist[il][1] is not None:
            newarglist.append(varlist[il][1]);
        il = il + 1;
    return newarglist;
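
# arglistize flattens (option, value) pairs into an argv-style list, skipping
# None entries (hypothetical paths, for illustration only):
#   arglistize("/usr/bin/curl", ("-o", "out.html"), ("--silent", None))
#   -> ['/usr/bin/curl', '-o', 'out.html', '--silent']
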
def fix_header_names(header_dict):
    if(sys.version[0]=="2"):
        header_dict = {k.title(): v for k, v in header_dict.iteritems()};
    if(sys.version[0]>="3"):
        header_dict = {k.title(): v for k, v in header_dict.items()};
    return header_dict;
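
# fix_header_names only normalizes key casing (illustrative):
#   fix_header_names({'user-agent': "test", 'ACCEPT': "*/*"})
#   -> {'User-Agent': 'test', 'Accept': '*/*'}
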
# hms_string by ArcGIS Python Recipes
# https://arcpy.wordpress.com/2012/04/20/146/
def hms_string(sec_elapsed):
    h = int(sec_elapsed / (60 * 60));
    m = int((sec_elapsed % (60 * 60)) / 60);
    s = sec_elapsed % 60.0;
    return "{}:{:>02}:{:>05.2f}".format(h, m, s);
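
# hms_string example (illustrative): 3661.5 seconds is 1 hour, 1 minute, 1.5 seconds:
#   hms_string(3661.5) -> "1:01:01.50"
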
# get_readable_size by Lipis
# http://stackoverflow.com/posts/14998888/revisions
def get_readable_size(bytes, precision=1, unit="IEC"):
    unit = unit.upper();
    if(unit!="IEC" and unit!="SI"):
        unit = "IEC";
    if(unit=="IEC"):
        units = [" B"," KiB"," MiB"," GiB"," TiB"," PiB"," EiB"," ZiB"];
        unitswos = ["B","KiB","MiB","GiB","TiB","PiB","EiB","ZiB"];
        unitsize = 1024.0;
    if(unit=="SI"):
        units = [" B"," kB"," MB"," GB"," TB"," PB"," EB"," ZB"];
        unitswos = ["B","kB","MB","GB","TB","PB","EB","ZB"];
        unitsize = 1000.0;
    return_val = {};
    orgbytes = bytes;
    for unit in units:
        if abs(bytes) < unitsize:
            strformat = "%3."+str(precision)+"f%s";
            pre_return_val = (strformat % (bytes, unit));
            pre_return_val = re.sub(r"([0]+) ([A-Za-z]+)", r" \2", pre_return_val);
            pre_return_val = re.sub(r"\. ([A-Za-z]+)", r" \1", pre_return_val);
            alt_return_val = pre_return_val.split();
            return_val = {'Bytes': orgbytes, 'ReadableWithSuffix': pre_return_val, 'ReadableWithoutSuffix': alt_return_val[0], 'ReadableSuffix': alt_return_val[1]};
            return return_val;
        bytes /= unitsize;
    strformat = "%."+str(precision)+"f%s";
    pre_return_val = (strformat % (bytes, "YiB"));
    pre_return_val = re.sub(r"([0]+) ([A-Za-z]+)", r" \2", pre_return_val);
    pre_return_val = re.sub(r"\. ([A-Za-z]+)", r" \1", pre_return_val);
    alt_return_val = pre_return_val.split();
    return_val = {'Bytes': orgbytes, 'ReadableWithSuffix': pre_return_val, 'ReadableWithoutSuffix': alt_return_val[0], 'ReadableSuffix': alt_return_val[1]};
    return return_val;
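
# get_readable_size trims trailing zeros from the formatted value (illustrative):
#   get_readable_size(2560, 2, "IEC")['ReadableWithSuffix'] -> "2.5 KiB"
#   get_readable_size(2048, 2, "IEC")['ReadableSuffix'] -> "KiB"
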
def get_readable_size_from_file(infile, precision=1, unit="IEC", usehashes=False, usehashtypes="md5,sha1"):
    unit = unit.upper();
    usehashtypes = usehashtypes.lower();
    getfilesize = os.path.getsize(infile);
    return_val = get_readable_size(getfilesize, precision, unit);
    if(usehashes):
        hashtypelist = usehashtypes.split(",");
        openfile = open(infile, "rb");
        filecontents = openfile.read();
        openfile.close();
        listnumcount = 0;
        listnumend = len(hashtypelist);
        while(listnumcount < listnumend):
            hashtypelistlow = hashtypelist[listnumcount].strip();
            hashtypelistup = hashtypelistlow.upper();
            filehash = hashlib.new(hashtypelistup);
            filehash.update(filecontents);
            filegethash = filehash.hexdigest();
            return_val.update({hashtypelistup: filegethash});
            listnumcount += 1;
    return return_val;
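
# get_readable_size_from_file wraps get_readable_size with os.path.getsize and,
# optionally, whole-file digests keyed by uppercase hash name (hypothetical path):
#   get_readable_size_from_file("/tmp/file.bin", usehashes=True, usehashtypes="md5")
#   -> the size dict plus an 'MD5' entry holding the hex digest
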
def get_readable_size_from_string(instring, precision=1, unit="IEC", usehashes=False, usehashtypes="md5,sha1"):
    unit = unit.upper();
    usehashtypes = usehashtypes.lower();
    getfilesize = len(instring);
    return_val = get_readable_size(getfilesize, precision, unit);
    if(usehashes):
        hashtypelist = usehashtypes.split(",");
        listnumcount = 0;
        listnumend = len(hashtypelist);
        while(listnumcount < listnumend):
            hashtypelistlow = hashtypelist[listnumcount].strip();
            hashtypelistup = hashtypelistlow.upper();
            filehash = hashlib.new(hashtypelistup);
            if(sys.version[0]=="2"):
                filehash.update(instring);
            if(sys.version[0]>="3"):
                filehash.update(instring.encode('utf-8'));
            filegethash = filehash.hexdigest();
            return_val.update({hashtypelistup: filegethash});
            listnumcount += 1;
    return return_val;
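
# get_readable_size_from_string hashes the string itself (encoded as UTF-8 on
# Python 3); e.g. (illustrative):
#   get_readable_size_from_string("hello", usehashes=True, usehashtypes="md5")['MD5']
#   -> "5d41402abc4b2a76b9719d911017c592"
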
def http_status_to_reason(code):
    reasons = {
        100: 'Continue',
        101: 'Switching Protocols',
        102: 'Processing',
        200: 'OK',
        201: 'Created',
        202: 'Accepted',
        203: 'Non-Authoritative Information',
        204: 'No Content',
        205: 'Reset Content',
        206: 'Partial Content',
        207: 'Multi-Status',
        208: 'Already Reported',
        226: 'IM Used',
        300: 'Multiple Choices',
        301: 'Moved Permanently',
        302: 'Found',
        303: 'See Other',
        304: 'Not Modified',
        305: 'Use Proxy',
        307: 'Temporary Redirect',
        308: 'Permanent Redirect',
        400: 'Bad Request',
        401: 'Unauthorized',
        402: 'Payment Required',
        403: 'Forbidden',
        404: 'Not Found',
        405: 'Method Not Allowed',
        406: 'Not Acceptable',
        407: 'Proxy Authentication Required',
        408: 'Request Timeout',
        409: 'Conflict',
        410: 'Gone',
        411: 'Length Required',
        412: 'Precondition Failed',
        413: 'Payload Too Large',
        414: 'URI Too Long',
        415: 'Unsupported Media Type',
        416: 'Range Not Satisfiable',
        417: 'Expectation Failed',
        421: 'Misdirected Request',
        422: 'Unprocessable Entity',
        423: 'Locked',
        424: 'Failed Dependency',
        426: 'Upgrade Required',
        428: 'Precondition Required',
        429: 'Too Many Requests',
        431: 'Request Header Fields Too Large',
        451: 'Unavailable For Legal Reasons',
        500: 'Internal Server Error',
        501: 'Not Implemented',
        502: 'Bad Gateway',
        503: 'Service Unavailable',
        504: 'Gateway Timeout',
        505: 'HTTP Version Not Supported',
        506: 'Variant Also Negotiates',
        507: 'Insufficient Storage',
        508: 'Loop Detected',
        510: 'Not Extended',
        511: 'Network Authentication Required'
    };
    return reasons.get(code, 'Unknown Status Code');
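
# http_status_to_reason example (illustrative):
#   http_status_to_reason(404) -> 'Not Found'
#   http_status_to_reason(599) -> 'Unknown Status Code'
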
def ftp_status_to_reason(code):
    reasons = {
        110: 'Restart marker reply',
        120: 'Service ready in nnn minutes',
        125: 'Data connection already open; transfer starting',
        150: 'File status okay; about to open data connection',
        200: 'Command okay',
        202: 'Command not implemented, superfluous at this site',
        211: 'System status, or system help reply',
        212: 'Directory status',
        213: 'File status',
        214: 'Help message',
        215: 'NAME system type',
        220: 'Service ready for new user',
        221: 'Service closing control connection',
        225: 'Data connection open; no transfer in progress',
        226: 'Closing data connection',
        227: 'Entering Passive Mode',
        230: 'User logged in, proceed',
        250: 'Requested file action okay, completed',
        257: '"PATHNAME" created',
        331: 'User name okay, need password',
        332: 'Need account for login',
        350: 'Requested file action pending further information',
        421: 'Service not available, closing control connection',
        425: 'Can\'t open data connection',
        426: 'Connection closed; transfer aborted',
        450: 'Requested file action not taken',
        451: 'Requested action aborted. Local error in processing',
        452: 'Requested action not taken. Insufficient storage space in system',
        500: 'Syntax error, command unrecognized',
        501: 'Syntax error in parameters or arguments',
        502: 'Command not implemented',
        503: 'Bad sequence of commands',
        504: 'Command not implemented for that parameter',
        530: 'Not logged in',
        532: 'Need account for storing files',
        550: 'Requested action not taken. File unavailable',
        551: 'Requested action aborted. Page type unknown',
        552: 'Requested file action aborted. Exceeded storage allocation',
        553: 'Requested action not taken. File name not allowed'
    };
    return reasons.get(code, 'Unknown Status Code');

def sftp_status_to_reason(code):
    reasons = {
        0: 'SSH_FX_OK',
        1: 'SSH_FX_EOF',
        2: 'SSH_FX_NO_SUCH_FILE',
        3: 'SSH_FX_PERMISSION_DENIED',
        4: 'SSH_FX_FAILURE',
        5: 'SSH_FX_BAD_MESSAGE',
        6: 'SSH_FX_NO_CONNECTION',
        7: 'SSH_FX_CONNECTION_LOST',
        8: 'SSH_FX_OP_UNSUPPORTED'
    };
    return reasons.get(code, 'Unknown Status Code');

def make_http_headers_from_dict_to_list(headers={'Referer': "http://google.com/", 'User-Agent': geturls_ua, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"}):
    if isinstance(headers, dict):
        returnval = [];
        if(sys.version[0]=="2"):
            for headkey, headvalue in headers.iteritems():
                returnval.append((headkey, headvalue));
        if(sys.version[0]>="3"):
            for headkey, headvalue in headers.items():
                returnval.append((headkey, headvalue));
    elif isinstance(headers, list):
        returnval = headers;
    else:
        returnval = False;
    return returnval;
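
# make_http_headers_from_dict_to_list example (illustrative); lists pass through
# unchanged and anything else returns False:
#   make_http_headers_from_dict_to_list({'User-Agent': "test"})
#   -> [('User-Agent', 'test')]
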
def make_http_headers_from_dict_to_pycurl(headers={'Referer': "http://google.com/", 'User-Agent': geturls_ua, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"}):
    if isinstance(headers, dict):
        returnval = [];
        if(sys.version[0]=="2"):
            for headkey, headvalue in headers.iteritems():
                returnval.append(headkey+": "+headvalue);
        if(sys.version[0]>="3"):
            for headkey, headvalue in headers.items():
                returnval.append(headkey+": "+headvalue);
    elif isinstance(headers, list):
        returnval = headers;
    else:
        returnval = False;
    return returnval;

def make_http_headers_from_pycurl_to_dict(headers):
    header_dict = {};
    headers = headers.strip().split('\r\n');
    for header in headers:
        parts = header.split(': ', 1);
        if(len(parts) == 2):
            key, value = parts;
            header_dict[key.title()] = value;
    return header_dict;
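
# make_http_headers_from_pycurl_to_dict parses a raw pycurl header blob; the
# status line has no ": " separator, so it is skipped (illustrative):
#   make_http_headers_from_pycurl_to_dict("HTTP/1.1 200 OK\r\nContent-type: text/html")
#   -> {'Content-Type': 'text/html'}
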
def make_http_headers_from_list_to_dict(headers=[("Referer", "http://google.com/"), ("User-Agent", geturls_ua), ("Accept-Encoding", compression_supported), ("Accept-Language", "en-US,en;q=0.8,en-CA,en-GB;q=0.6"), ("Accept-Charset", "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7"), ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"), ("Connection", "close")]):
    if isinstance(headers, list):
        returnval = {};
        mli = 0;
        mlil = len(headers);
        while(mli<mlil):
            returnval.update({headers[mli][0]: headers[mli][1]});
            mli = mli + 1;
    elif isinstance(headers, dict):
        returnval = headers;
    else:
        returnval = False;
    return returnval;

def get_httplib_support(checkvalue=None):
    global haverequests, havemechanize, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp;
    returnval = [];
    returnval.append("ftp");
    returnval.append("httplib");
    if(havehttplib2):
        returnval.append("httplib2");
    returnval.append("urllib");
    if(haveurllib3):
        returnval.append("urllib3");
        returnval.append("request3");
    returnval.append("request");
    if(haverequests):
        returnval.append("requests");
    if(havehttpx):
        returnval.append("httpx");
        returnval.append("httpx2");
    if(havemechanize):
        returnval.append("mechanize");
    if(havepycurl):
        returnval.append("pycurl");
        if(hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
            returnval.append("pycurl2");
        if(hasattr(pycurl, "CURL_HTTP_VERSION_3_0")):
            returnval.append("pycurl3");
    if(haveparamiko):
        returnval.append("sftp");
    if(havepysftp):
        returnval.append("pysftp");
    if(not checkvalue is None):
        if(checkvalue=="urllib1" or checkvalue=="urllib2"):
            checkvalue = "urllib";
        if(checkvalue=="httplib1"):
            checkvalue = "httplib";
        if(checkvalue in returnval):
            returnval = True;
        else:
            returnval = False;
    return returnval;

def check_httplib_support(checkvalue="urllib"):
    if(checkvalue=="urllib1" or checkvalue=="urllib2"):
        checkvalue = "urllib";
    if(checkvalue=="httplib1"):
        checkvalue = "httplib";
    returnval = get_httplib_support(checkvalue);
    return returnval;

def get_httplib_support_list():
    returnval = get_httplib_support(None);
    return returnval;
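
# get_httplib_support_list reports which backends can actually be used; the exact
# contents depend on which of the optional modules imported above are installed
# (illustrative):
#   get_httplib_support_list()
#   -> ['ftp', 'httplib', 'urllib', 'request', ...]
# download_from_url (below) picks one of these backends by name; a minimal usage
# sketch (hypothetical URL; every backend branch returns False on connection
# errors, and the FTP/SFTP branches shown here return a dict with 'Content',
# 'Contentsize', 'Headers', 'URL', and 'Code' keys):
#   ret = download_from_url("http://example.com/", httplibuse="urllib");
#   if(ret): print(ret['URL']);
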
def download_from_url(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", buffersize=524288, sleep=-1, timeout=10):
    global geturls_download_sleep, havezstd, havebrotli, haverequests, havemechanize, havepycurl, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp;
    if(sleep<0):
        sleep = geturls_download_sleep;
    if(timeout<=0):
        timeout = 10;
    if(httplibuse=="urllib1" or httplibuse=="urllib2" or httplibuse=="request"):
        httplibuse = "urllib";
    if(httplibuse=="httplib1"):
        httplibuse = "httplib";
    if(not haverequests and httplibuse=="requests"):
        httplibuse = "urllib";
    if(not havehttpx and httplibuse=="httpx"):
        httplibuse = "urllib";
    if(not havehttpx and httplibuse=="httpx2"):
        httplibuse = "urllib";
    if(not havehttpcore and httplibuse=="httpcore"):
        httplibuse = "urllib";
    if(not havehttpcore and httplibuse=="httpcore2"):
        httplibuse = "urllib";
    if(not havemechanize and httplibuse=="mechanize"):
        httplibuse = "urllib";
    if(not havepycurl and httplibuse=="pycurl"):
        httplibuse = "urllib";
    if(not havepycurl and httplibuse=="pycurl2"):
        httplibuse = "urllib";
    if(havepycurl and httplibuse=="pycurl2" and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
        httplibuse = "pycurl";
    if(not havepycurl and httplibuse=="pycurl3"):
        httplibuse = "urllib";
    if(havepycurl and httplibuse=="pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
        httplibuse = "pycurl2";
    if(havepycurl and httplibuse=="pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
        httplibuse = "pycurl";
    if(not havehttplib2 and httplibuse=="httplib2"):
        httplibuse = "httplib";
    if(not haveparamiko and httplibuse=="sftp"):
        httplibuse = "ftp";
    if(not havepysftp and httplibuse=="pysftp"):
        httplibuse = "ftp";
    urlparts = urlparse.urlparse(httpurl);
    if(isinstance(httpheaders, list)):
        httpheaders = make_http_headers_from_list_to_dict(httpheaders);
    httpheaders = fix_header_names(httpheaders);
    if(httpuseragent is not None):
        if('User-Agent' in httpheaders):
            httpheaders['User-Agent'] = httpuseragent;
        else:
            httpheaders.update({'User-Agent': httpuseragent});
    if(httpreferer is not None):
        if('Referer' in httpheaders):
            httpheaders['Referer'] = httpreferer;
        else:
            httpheaders.update({'Referer': httpreferer});
    if(urlparts.username is not None or urlparts.password is not None):
        if(sys.version[0]=="2"):
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password));
        if(sys.version[0]>="3"):
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
        httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
    geturls_opener = build_opener(HTTPCookieProcessor(httpcookie));
    if(httplibuse=="urllib" or httplibuse=="mechanize"):
        if(isinstance(httpheaders, dict)):
            httpheaders = make_http_headers_from_dict_to_list(httpheaders);
    if(httplibuse=="pycurl" or httplibuse=="pycurl2" or httplibuse=="pycurl3"):
        if(isinstance(httpheaders, dict)):
            httpheaders = make_http_headers_from_dict_to_pycurl(httpheaders);
    geturls_opener.addheaders = httpheaders;
    time.sleep(sleep);
    if(postdata is not None and not isinstance(postdata, dict)):
        postdata = urlencode(postdata);
    if(httplibuse=="urllib" or httplibuse=="request"):
        geturls_request = Request(httpurl);
        try:
            if(httpmethod=="GET"):
                geturls_text = geturls_opener.open(geturls_request);
            elif(httpmethod=="POST"):
                geturls_text = geturls_opener.open(geturls_request, data=postdata);
            else:
                geturls_text = geturls_opener.open(geturls_request);
        except HTTPError as geturls_text_error:
            geturls_text = geturls_text_error;
            log.info("Error With URL "+httpurl);
        except URLError:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.getcode();
        try:
            httpcodereason = geturls_text.reason;
        except AttributeError:
            httpcodereason = http_status_to_reason(geturls_text.getcode());
        try:
            httpversionout = geturls_text.version;
        except AttributeError:
            httpversionout = "1.1";
        httpmethodout = geturls_request.get_method();
        httpurlout = geturls_text.geturl();
        httpheaderout = geturls_text.info();
        httpheadersentout = httpheaders;
    elif(httplibuse=="httplib"):
        if(urlparts[0]=="http"):
            httpconn = HTTPConnection(urlparts[1], timeout=timeout);
        elif(urlparts[0]=="https"):
            httpconn = HTTPSConnection(urlparts[1], timeout=timeout);
        else:
            return False;
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            if(httpmethod=="GET"):
                httpconn.request("GET", urlparts[2], headers=httpheaders);
            elif(httpmethod=="POST"):
                httpconn.request("POST", urlparts[2], body=postdata, headers=httpheaders);
            else:
                httpconn.request("GET", urlparts[2], headers=httpheaders);
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.gaierror:
            log.info("Error With URL "+httpurl);
            return False;
        except BlockingIOError:
            log.info("Error With URL "+httpurl);
            return False;
        geturls_text = httpconn.getresponse();
        httpcodeout = geturls_text.status;
        httpcodereason = geturls_text.reason;
        if(geturls_text.version==10):  # http.client reports the version as an int (10 or 11)
            httpversionout = "1.0";
        else:
            httpversionout = "1.1";
        httpmethodout = geturls_text._method;
        httpurlout = httpurl;
        httpheaderout = geturls_text.getheaders();
        httpheadersentout = httpheaders;
    elif(httplibuse=="httplib2"):
        if(urlparts[0]=="http"):
            httpconn = HTTPConnectionWithTimeout(urlparts[1], timeout=timeout);
        elif(urlparts[0]=="https"):
            httpconn = HTTPSConnectionWithTimeout(urlparts[1], timeout=timeout);
        else:
            return False;
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            if(httpmethod=="GET"):
                httpconn.request("GET", urlparts[2], headers=httpheaders);
            elif(httpmethod=="POST"):
                httpconn.request("POST", urlparts[2], body=postdata, headers=httpheaders);
            else:
                httpconn.request("GET", urlparts[2], headers=httpheaders);
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.gaierror:
            log.info("Error With URL "+httpurl);
            return False;
        except BlockingIOError:
            log.info("Error With URL "+httpurl);
            return False;
        geturls_text = httpconn.getresponse();
        httpcodeout = geturls_text.status;
        httpcodereason = geturls_text.reason;
        if(geturls_text.version==10):
            httpversionout = "1.0";
        else:
            httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = httpurl;
        httpheaderout = geturls_text.getheaders();
        httpheadersentout = httpheaders;
    elif(httplibuse=="urllib3" or httplibuse=="request3"):
        timeout = urllib3.util.Timeout(connect=timeout, read=timeout);
        urllib_pool = urllib3.PoolManager(headers=httpheaders, timeout=timeout);
        try:
            if(httpmethod=="GET"):
                geturls_text = urllib_pool.request("GET", httpurl, headers=httpheaders, preload_content=False);
            elif(httpmethod=="POST"):
                geturls_text = urllib_pool.request("POST", httpurl, body=postdata, headers=httpheaders, preload_content=False);
            else:
                geturls_text = urllib_pool.request("GET", httpurl, headers=httpheaders, preload_content=False);
        except urllib3.exceptions.ConnectTimeoutError:
            log.info("Error With URL "+httpurl);
            return False;
        except urllib3.exceptions.NewConnectionError:  # urllib3 has no ConnectError; NewConnectionError signals a failed connection
            log.info("Error With URL "+httpurl);
            return False;
        except urllib3.exceptions.MaxRetryError:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        except ValueError:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.status;
        httpcodereason = geturls_text.reason;
        if(geturls_text.version==10):
            httpversionout = "1.0";
        else:
            httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = geturls_text.geturl();
        httpheaderout = geturls_text.info();
        httpheadersentout = httpheaders;
    elif(httplibuse=="requests"):
        try:
            reqsession = requests.Session();
            if(httpmethod=="GET"):
                geturls_text = reqsession.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
            elif(httpmethod=="POST"):
                geturls_text = reqsession.post(httpurl, timeout=timeout, data=postdata, headers=httpheaders, cookies=httpcookie);
            else:
                geturls_text = reqsession.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
        except requests.exceptions.ConnectTimeout:
            log.info("Error With URL "+httpurl);
            return False;
        except requests.exceptions.ConnectionError:  # requests raises ConnectionError, not ConnectError
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.status_code;
        httpcodereason = geturls_text.reason;
        if(geturls_text.raw.version==10):
            httpversionout = "1.0";
        else:
            httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = geturls_text.url;
        httpheaderout = geturls_text.headers;
        httpheadersentout = geturls_text.request.headers;
    elif(httplibuse=="httpx"):
        try:
            if(httpmethod=="GET"):
                httpx_pool = httpx.Client(http1=True, http2=False, trust_env=True);
                geturls_text = httpx_pool.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
            elif(httpmethod=="POST"):
                httpx_pool = httpx.Client(http1=True, http2=False, trust_env=True);
                geturls_text = httpx_pool.post(httpurl, timeout=timeout, data=postdata, headers=httpheaders, cookies=httpcookie);
            else:
                httpx_pool = httpx.Client(http1=True, http2=False, trust_env=True);
                geturls_text = httpx_pool.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
        except httpx.ConnectTimeout:
            log.info("Error With URL "+httpurl);
            return False;
        except httpx.ConnectError:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.status_code;
        try:
            httpcodereason = geturls_text.reason_phrase;
        except:
            httpcodereason = http_status_to_reason(geturls_text.status_code);
        httpversionout = geturls_text.http_version;
        httpmethodout = httpmethod;
        httpurlout = str(geturls_text.url);
        httpheaderout = geturls_text.headers;
        httpheadersentout = geturls_text.request.headers;
    elif(httplibuse=="httpx2"):
        try:
            if(httpmethod=="GET"):
                httpx_pool = httpx.Client(http1=True, http2=True, trust_env=True);
                geturls_text = httpx_pool.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
            elif(httpmethod=="POST"):
                httpx_pool = httpx.Client(http1=True, http2=True, trust_env=True);
                geturls_text = httpx_pool.post(httpurl, timeout=timeout, data=postdata, headers=httpheaders, cookies=httpcookie);
            else:
                httpx_pool = httpx.Client(http1=True, http2=True, trust_env=True);
                geturls_text = httpx_pool.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
        except httpx.ConnectTimeout:
            log.info("Error With URL "+httpurl);
            return False;
        except httpx.ConnectError:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.status_code;
        try:
            httpcodereason = geturls_text.reason_phrase;
        except:
            httpcodereason = http_status_to_reason(geturls_text.status_code);
        httpversionout = geturls_text.http_version;
        httpmethodout = httpmethod;
        httpurlout = str(geturls_text.url);
        httpheaderout = geturls_text.headers;
        httpheadersentout = geturls_text.request.headers;
    elif(httplibuse=="httpcore"):
        try:
            if(httpmethod=="GET"):
                httpx_pool = httpcore.ConnectionPool(http1=True, http2=False);
                geturls_text = httpx_pool.request("GET", httpurl, headers=httpheaders);
            elif(httpmethod=="POST"):
                httpx_pool = httpcore.ConnectionPool(http1=True, http2=False);
                geturls_text = httpx_pool.request("POST", httpurl, content=postdata, headers=httpheaders);  # httpcore takes the request body via "content"
            else:
                httpx_pool = httpcore.ConnectionPool(http1=True, http2=False);
                geturls_text = httpx_pool.request("GET", httpurl, headers=httpheaders);
        except httpcore.ConnectTimeout:
            log.info("Error With URL "+httpurl);
            return False;
        except httpcore.ConnectError:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.status;
        httpcodereason = http_status_to_reason(geturls_text.status);
        httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = str(httpurl);
        httpheaderout = geturls_text.headers;
        httpheadersentout = httpheaders;
    elif(httplibuse=="httpcore2"):
        try:
            if(httpmethod=="GET"):
                httpx_pool = httpcore.ConnectionPool(http1=True, http2=True);
                geturls_text = httpx_pool.request("GET", httpurl, headers=httpheaders);
            elif(httpmethod=="POST"):
                httpx_pool = httpcore.ConnectionPool(http1=True, http2=True);
                geturls_text = httpx_pool.request("POST", httpurl, content=postdata, headers=httpheaders);
            else:
                httpx_pool = httpcore.ConnectionPool(http1=True, http2=True);
                geturls_text = httpx_pool.request("GET", httpurl, headers=httpheaders);
        except httpcore.ConnectTimeout:
            log.info("Error With URL "+httpurl);
            return False;
        except httpcore.ConnectError:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.status;
        httpcodereason = http_status_to_reason(geturls_text.status);
        httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = str(httpurl);
        httpheaderout = geturls_text.headers;
        httpheadersentout = httpheaders;
    elif(httplibuse=="mechanize"):
        geturls_opener = mechanize.Browser();
        if(isinstance(httpheaders, dict)):
            httpheaders = make_http_headers_from_dict_to_list(httpheaders);
        time.sleep(sleep);
        geturls_opener.addheaders = httpheaders;
        geturls_opener.set_cookiejar(httpcookie);
        geturls_opener.set_handle_robots(False);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            if(httpmethod=="GET"):
                geturls_text = geturls_opener.open(httpurl);
            elif(httpmethod=="POST"):
                geturls_text = geturls_opener.open(httpurl, data=postdata);
            else:
                geturls_text = geturls_opener.open(httpurl);
        except mechanize.HTTPError as geturls_text_error:
            geturls_text = geturls_text_error;
            log.info("Error With URL "+httpurl);
        except URLError:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.code;
        httpcodereason = geturls_text.msg;
        httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = geturls_text.geturl();
        httpheaderout = geturls_text.info();
        reqhead = geturls_opener.request;
        httpheadersentout = reqhead.header_items();
    elif(httplibuse=="pycurl"):
        retrieved_body = BytesIO();
        retrieved_headers = BytesIO();
        try:
            if(httpmethod=="GET"):
                geturls_text = pycurl.Curl();
                geturls_text.setopt(geturls_text.URL, httpurl);
                geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_1_1);
                geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
                geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
                geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
                geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
                geturls_text.setopt(geturls_text.TIMEOUT, timeout);
                geturls_text.perform();
            elif(httpmethod=="POST"):
                geturls_text = pycurl.Curl();
                geturls_text.setopt(geturls_text.URL, httpurl);
                geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_1_1);
                geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
                geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
                geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
                geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
                geturls_text.setopt(geturls_text.TIMEOUT, timeout);
                geturls_text.setopt(geturls_text.POST, True);
                geturls_text.setopt(geturls_text.POSTFIELDS, postdata);
                geturls_text.perform();
            else:
                geturls_text = pycurl.Curl();
                geturls_text.setopt(geturls_text.URL, httpurl);
                geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_1_1);
                geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
                geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
                geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
                geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
                geturls_text.setopt(geturls_text.TIMEOUT, timeout);
                geturls_text.perform();
            retrieved_headers.seek(0);
            if(sys.version[0]=="2"):
                pycurlhead = retrieved_headers.read();
            if(sys.version[0]>="3"):
                pycurlhead = retrieved_headers.read().decode('UTF-8');
            pyhttpverinfo = re.findall(r'^HTTP/([0-9.]+) (\d+)(?: ([A-Za-z\s]+))?$', pycurlhead.splitlines()[0].strip().rstrip('\r\n'))[0];
            pycurlheadersout = make_http_headers_from_pycurl_to_dict(pycurlhead);
            retrieved_body.seek(0);
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.gaierror:
            log.info("Error With URL "+httpurl);
            return False;
        except ValueError:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.getinfo(geturls_text.HTTP_CODE);
        httpcodereason = http_status_to_reason(geturls_text.getinfo(geturls_text.HTTP_CODE));
        httpversionout = pyhttpverinfo[0];
        httpmethodout = httpmethod;
        httpurlout = geturls_text.getinfo(geturls_text.EFFECTIVE_URL);
        httpheaderout = pycurlheadersout;
        httpheadersentout = httpheaders;
    elif(httplibuse=="pycurl2"):
        retrieved_body = BytesIO();
        retrieved_headers = BytesIO();
        try:
            if(httpmethod=="GET"):
                geturls_text = pycurl.Curl();
                geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_2_0);
                geturls_text.setopt(geturls_text.URL, httpurl);
                geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
                geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
                geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
                geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
                geturls_text.setopt(geturls_text.TIMEOUT, timeout);
                geturls_text.perform();
            elif(httpmethod=="POST"):
                geturls_text = pycurl.Curl();
                geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_2_0);
                geturls_text.setopt(geturls_text.URL, httpurl);
                geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
                geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
                geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
                geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
                geturls_text.setopt(geturls_text.TIMEOUT, timeout);
                geturls_text.setopt(geturls_text.POST, True);
                geturls_text.setopt(geturls_text.POSTFIELDS, postdata);
                geturls_text.perform();
            else:
                geturls_text = pycurl.Curl();
                geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_2_0);
                geturls_text.setopt(geturls_text.URL, httpurl);
                geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
                geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
                geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
                geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
                geturls_text.setopt(geturls_text.TIMEOUT, timeout);
                geturls_text.perform();
            retrieved_headers.seek(0);
            if(sys.version[0]=="2"):
                pycurlhead = retrieved_headers.read();
            if(sys.version[0]>="3"):
                pycurlhead = retrieved_headers.read().decode('UTF-8');
            pyhttpverinfo = re.findall(r'^HTTP/([0-9.]+) (\d+)(?: ([A-Za-z\s]+))?$', pycurlhead.splitlines()[0].strip())[0];
            pycurlheadersout = make_http_headers_from_pycurl_to_dict(pycurlhead);
            retrieved_body.seek(0);
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.gaierror:
            log.info("Error With URL "+httpurl);
            return False;
        except ValueError:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.getinfo(geturls_text.HTTP_CODE);
        httpcodereason = http_status_to_reason(geturls_text.getinfo(geturls_text.HTTP_CODE));
        httpversionout = pyhttpverinfo[0];
        httpmethodout = httpmethod;
        httpurlout = geturls_text.getinfo(geturls_text.EFFECTIVE_URL);
        httpheaderout = pycurlheadersout;
        httpheadersentout = httpheaders;
    elif(httplibuse=="pycurl3"):
        retrieved_body = BytesIO();
        retrieved_headers = BytesIO();
        try:
            if(httpmethod=="GET"):
                geturls_text = pycurl.Curl();
                geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_3_0);
                geturls_text.setopt(geturls_text.URL, httpurl);
                geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
                geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
                geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
                geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
                geturls_text.setopt(geturls_text.TIMEOUT, timeout);
                geturls_text.perform();
            elif(httpmethod=="POST"):
                geturls_text = pycurl.Curl();
                geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_3_0);
                geturls_text.setopt(geturls_text.URL, httpurl);
                geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
                geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
                geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
                geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
                geturls_text.setopt(geturls_text.TIMEOUT, timeout);
                geturls_text.setopt(geturls_text.POST, True);
                geturls_text.setopt(geturls_text.POSTFIELDS, postdata);
                geturls_text.perform();
            else:
                geturls_text = pycurl.Curl();
                geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_3_0);
                geturls_text.setopt(geturls_text.URL, httpurl);
                geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
                geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
                geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
                geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
                geturls_text.setopt(geturls_text.TIMEOUT, timeout);
                geturls_text.perform();
            retrieved_headers.seek(0);
            if(sys.version[0]=="2"):
                pycurlhead = retrieved_headers.read();
            if(sys.version[0]>="3"):
                pycurlhead = retrieved_headers.read().decode('UTF-8');
            pyhttpverinfo = re.findall(r'^HTTP/([0-9.]+) (\d+)(?: ([A-Za-z\s]+))?$', pycurlhead.splitlines()[0].strip().rstrip('\r\n'))[0];
            pycurlheadersout = make_http_headers_from_pycurl_to_dict(pycurlhead);
            retrieved_body.seek(0);
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.gaierror:
            log.info("Error With URL "+httpurl);
            return False;
        except ValueError:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.getinfo(geturls_text.HTTP_CODE);
        httpcodereason = http_status_to_reason(geturls_text.getinfo(geturls_text.HTTP_CODE));
        httpversionout = pyhttpverinfo[0];
        httpmethodout = httpmethod;
        httpurlout = geturls_text.getinfo(geturls_text.EFFECTIVE_URL);
        httpheaderout = pycurlheadersout;
        httpheadersentout = httpheaders;
1201 elif(httplibuse=="ftp"):
1202 geturls_text = download_file_from_ftp_file(httpurl);
1203 if(not geturls_text):
1204 return False;
1205 downloadsize = None;
1206 if(downloadsize is not None):
1207 downloadsize = int(downloadsize);
1208 if downloadsize is None: downloadsize = 0;
1209 fulldatasize = 0;
1210 prevdownsize = 0;
1211 log.info("Downloading URL "+httpurl);
1212 with BytesIO() as strbuf:
1213 while True:
1214 databytes = geturls_text.read(buffersize);
1215 if not databytes: break;
1216 datasize = len(databytes);
1217 fulldatasize = datasize + fulldatasize;
1218 percentage = "";
1219 if(downloadsize>0):
1220 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
1221 downloaddiff = fulldatasize - prevdownsize;
1222 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
1223 prevdownsize = fulldatasize;
1224 strbuf.write(databytes);
1225 strbuf.seek(0);
1226 returnval_content = strbuf.read();
1227 returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size(fulldatasize, 2, "SI")}, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None};
geturls_text.close();
return returnval;
1229 elif(httplibuse=="sftp"):
1230 geturls_text = download_file_from_sftp_file(httpurl);
1231 if(not geturls_text):
1232 return False;
# SFTP transfers likewise provide no Content-Length, so the total size is unknown.
downloadsize = 0;
1237 fulldatasize = 0;
1238 prevdownsize = 0;
1239 log.info("Downloading URL "+httpurl);
1240 with BytesIO() as strbuf:
1241 while True:
1242 databytes = geturls_text.read(buffersize);
1243 if not databytes: break;
1244 datasize = len(databytes);
1245 fulldatasize = datasize + fulldatasize;
1246 percentage = "";
1247 if(downloadsize>0):
1248 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
1249 downloaddiff = fulldatasize - prevdownsize;
1250 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
1251 prevdownsize = fulldatasize;
1252 strbuf.write(databytes);
1253 strbuf.seek(0);
1254 returnval_content = strbuf.read();
1255 returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size(fulldatasize, 2, "SI")}, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None};
1256 geturls_text.close();
1257 return returnval;
1258 elif(httplibuse=="pysftp"):
1259 geturls_text = download_file_from_pysftp_file(httpurl);
1260 if(not geturls_text):
1261 return False;
# As with ftp/sftp, no Content-Length is available, so the total size is unknown.
downloadsize = 0;
1266 fulldatasize = 0;
1267 prevdownsize = 0;
1268 log.info("Downloading URL "+httpurl);
1269 with BytesIO() as strbuf:
1270 while True:
1271 databytes = geturls_text.read(buffersize);
1272 if not databytes: break;
1273 datasize = len(databytes);
1274 fulldatasize = datasize + fulldatasize;
1275 percentage = "";
1276 if(downloadsize>0):
1277 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
1278 downloaddiff = fulldatasize - prevdownsize;
1279 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
1280 prevdownsize = fulldatasize;
1281 strbuf.write(databytes);
1282 strbuf.seek(0);
1283 returnval_content = strbuf.read();
1284 returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size(fulldatasize, 2, "SI")}, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None};
1285 geturls_text.close();
1286 return returnval;
1287 else:
1288 returnval = False;
1289 if(isinstance(httpheaderout, list) and (httplibuse!="pycurl" and httplibuse!="pycurl2" and httplibuse!="pycurl3")):
1290 httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
1291 if(isinstance(httpheaderout, list) and (httplibuse=="pycurl" or httplibuse=="pycurl2" or httplibuse=="pycurl3")):
1292 httpheaderout = dict(make_http_headers_from_pycurl_to_dict("\r\n".join(httpheaderout)));
1293 if(sys.version[0]=="2"):
1294 try:
1295 prehttpheaderout = httpheaderout;
1296 httpheaderkeys = httpheaderout.keys();
1297 imax = len(httpheaderkeys);
1298 ic = 0;
1299 httpheaderout = {};
1300 while(ic < imax):
1301 httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
1302 ic += 1;
1303 except AttributeError:
1304 pass;
1305 httpheaderout = fix_header_names(httpheaderout);
1306 if(isinstance(httpheadersentout, list) and (httplibuse!="pycurl" and httplibuse!="pycurl2" and httplibuse!="pycurl3")):
1307 httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
1308 if(isinstance(httpheadersentout, list) and (httplibuse=="pycurl" or httplibuse=="pycurl2" or httplibuse=="pycurl3")):
1309 httpheadersentout = dict(make_http_headers_from_pycurl_to_dict("\r\n".join(httpheadersentout)));
1310 httpheadersentout = fix_header_names(httpheadersentout);
1311 log.info("Downloading URL "+httpurl);
1312 if(httplibuse=="urllib" or httplibuse=="request" or httplibuse=="request3" or httplibuse=="httplib" or httplibuse=="httplib2" or httplibuse=="urllib3" or httplibuse=="mechanize" or httplibuse=="httpx" or httplibuse=="httpx2" or httplibuse=="httpcore" or httplibuse=="httpcore2"):
1313 downloadsize = httpheaderout.get('Content-Length');
1314 if(downloadsize is not None):
1315 downloadsize = int(downloadsize);
1316 if downloadsize is None: downloadsize = 0;
1317 fulldatasize = 0;
1318 prevdownsize = 0;
1319 log.info("Downloading URL "+httpurl);
1320 with BytesIO() as strbuf:
1321 while True:
1322 databytes = geturls_text.read(buffersize);
1323 if not databytes: break;
1324 datasize = len(databytes);
1325 fulldatasize = datasize + fulldatasize;
1326 percentage = "";
1327 if(downloadsize>0):
1328 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
1329 downloaddiff = fulldatasize - prevdownsize;
1330 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
1331 prevdownsize = fulldatasize;
1332 strbuf.write(databytes);
1333 strbuf.seek(0);
1334 returnval_content = strbuf.read();
1335 geturls_text.close();
1336 if(httpheaderout.get("Content-Encoding")=="gzip"):
1337 try:
1338 returnval_content = zlib.decompress(returnval_content, 16+zlib.MAX_WBITS);
1339 except zlib.error:
1340 pass;
1341 elif(httpheaderout.get("Content-Encoding")=="deflate"):
1342 try:
1343 returnval_content = zlib.decompress(returnval_content);
1344 except zlib.error:
1345 pass;
1346 elif(httpheaderout.get("Content-Encoding")=="br" and havebrotli):
1347 try:
1348 returnval_content = brotli.decompress(returnval_content);
1349 except brotli.error:
1350 pass;
1351 elif(httpheaderout.get("Content-Encoding")=="zstd" and havezstd):
1352 try:
1353 returnval_content = zstandard.decompress(returnval_content);
except zstandard.ZstdError:
1355 pass;
1356 elif(httplibuse=="requests"):
1357 log.info("Downloading URL "+httpurl);
1358 downloadsize = httpheaderout.get('Content-Length');
1359 if(downloadsize is not None):
1360 downloadsize = int(downloadsize);
1361 if downloadsize is None: downloadsize = 0;
1362 fulldatasize = 0;
1363 prevdownsize = 0;
1364 log.info("Downloading URL "+httpurl);
1365 with BytesIO() as strbuf:
1366 while True:
1367 databytes = geturls_text.raw.read(buffersize);
1368 if not databytes: break;
1369 datasize = len(databytes);
1370 fulldatasize = datasize + fulldatasize;
1371 percentage = "";
1372 if(downloadsize>0):
1373 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
1374 downloaddiff = fulldatasize - prevdownsize;
1375 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
1376 prevdownsize = fulldatasize;
1377 strbuf.write(databytes);
1378 strbuf.seek(0);
1379 returnval_content = strbuf.read();
1380 geturls_text.close();
1381 if(httpheaderout.get("Content-Encoding")=="gzip"):
1382 try:
1383 returnval_content = zlib.decompress(returnval_content, 16+zlib.MAX_WBITS);
1384 except zlib.error:
1385 pass;
1386 elif(httpheaderout.get("Content-Encoding")=="deflate"):
1387 try:
1388 returnval_content = zlib.decompress(returnval_content);
1389 except zlib.error:
1390 pass;
1391 elif(httpheaderout.get("Content-Encoding")=="br" and havebrotli):
1392 try:
1393 returnval_content = brotli.decompress(returnval_content);
1394 except brotli.error:
1395 pass;
1396 elif(httpheaderout.get("Content-Encoding")=="zstd" and havezstd):
1397 try:
1398 returnval_content = zstandard.decompress(returnval_content);
except zstandard.ZstdError:
1400 pass;
1401 elif(httplibuse=="pycurl" or httplibuse=="pycurl2" or httplibuse=="pycurl3"):
1402 log.info("Downloading URL "+httpurl);
1403 downloadsize = httpheaderout.get('Content-Length');
1404 if(downloadsize is not None):
1405 downloadsize = int(downloadsize);
1406 if downloadsize is None: downloadsize = 0;
1407 fulldatasize = 0;
1408 prevdownsize = 0;
1409 log.info("Downloading URL "+httpurl);
1410 with BytesIO() as strbuf:
1411 while True:
1412 databytes = retrieved_body.read(buffersize);
1413 if not databytes: break;
1414 datasize = len(databytes);
1415 fulldatasize = datasize + fulldatasize;
1416 percentage = "";
1417 if(downloadsize>0):
1418 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
1419 downloaddiff = fulldatasize - prevdownsize;
1420 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
1421 prevdownsize = fulldatasize;
1422 strbuf.write(databytes);
1423 strbuf.seek(0);
1424 returnval_content = strbuf.read();
1425 geturls_text.close();
1426 if(httpheaderout.get("Content-Encoding")=="gzip"):
1427 try:
1428 returnval_content = zlib.decompress(returnval_content, 16+zlib.MAX_WBITS);
1429 except zlib.error:
1430 pass;
1431 elif(httpheaderout.get("Content-Encoding")=="deflate"):
1432 try:
1433 returnval_content = zlib.decompress(returnval_content);
1434 except zlib.error:
1435 pass;
1436 elif(httpheaderout.get("Content-Encoding")=="br" and havebrotli):
1437 try:
1438 returnval_content = brotli.decompress(returnval_content);
1439 except brotli.error:
1440 pass;
1441 elif(httpheaderout.get("Content-Encoding")=="zstd" and havezstd):
1442 try:
1443 returnval_content = zstandard.decompress(returnval_content);
except zstandard.ZstdError:
1445 pass;
1446 elif(httplibuse=="ftp" or httplibuse=="sftp" or httplibuse=="pysftp"):
1447 pass;
1448 else:
return False;
1450 returnval = {'Type': "Content", 'Content': returnval_content, 'Contentsize': fulldatasize, 'ContentsizeAlt': {'IEC': get_readable_size(fulldatasize, 2, "IEC"), 'SI': get_readable_size(fulldatasize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason};
1451 return returnval;
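# A minimal usage sketch (the URL is a placeholder; any supported httplibuse value works here):
#
#   result = download_from_url("http://example.com/file.bin", httplibuse="requests");
#   if(result):
#       data = result['Content'];
#       status = result['Code'];
#
# On failure download_from_url() returns False; on success it returns a dict with the body in
# 'Content' plus metadata such as 'Headers', 'Version', 'Method', 'URL', 'Code' and 'Reason'.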
1453 def download_from_url_file(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
global geturls_download_sleep, havezstd, havebrotli, tmpfileprefix, tmpfilesuffix, haverequests, havemechanize, havepycurl, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp;
1455 exec_time_start = time.time();
1456 myhash = hashlib.new("sha1");
1457 if(sys.version[0]=="2"):
1458 myhash.update(httpurl);
1459 myhash.update(str(buffersize));
1460 myhash.update(str(exec_time_start));
1461 if(sys.version[0]>="3"):
1462 myhash.update(httpurl.encode('utf-8'));
1463 myhash.update(str(buffersize).encode('utf-8'));
1464 myhash.update(str(exec_time_start).encode('utf-8'));
1465 newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest());
1466 if(sleep<0):
1467 sleep = geturls_download_sleep;
1468 if(timeout<=0):
1469 timeout = 10;
1470 if(httplibuse=="urllib1" or httplibuse=="urllib2" or httplibuse=="request"):
1471 httplibuse = "urllib";
1472 if(httplibuse=="httplib1"):
1473 httplibuse = "httplib";
1474 if(not haverequests and httplibuse=="requests"):
1475 httplibuse = "urllib";
1476 if(not havehttpx and httplibuse=="httpx"):
1477 httplibuse = "urllib";
1478 if(not havehttpx and httplibuse=="httpx2"):
1479 httplibuse = "urllib";
1480 if(not havehttpcore and httplibuse=="httpcore"):
1481 httplibuse = "urllib";
1482 if(not havehttpcore and httplibuse=="httpcore2"):
1483 httplibuse = "urllib";
1484 if(not havemechanize and httplibuse=="mechanize"):
1485 httplibuse = "urllib";
1486 if(not havepycurl and httplibuse=="pycurl"):
1487 httplibuse = "urllib";
1488 if(not havepycurl and httplibuse=="pycurl2"):
1489 httplibuse = "urllib";
1490 if(havepycurl and httplibuse=="pycurl2" and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
1491 httplibuse = "pycurl";
1492 if(not havepycurl and httplibuse=="pycurl3"):
1493 httplibuse = "urllib";
1494 if(havepycurl and httplibuse=="pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
1495 httplibuse = "pycurl2";
1496 if(havepycurl and httplibuse=="pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
1497 httplibuse = "pycurl";
1498 if(not havehttplib2 and httplibuse=="httplib2"):
1499 httplibuse = "httplib";
1500 if(not haveparamiko and httplibuse=="sftp"):
1501 httplibuse = "ftp";
1502 if(not haveparamiko and httplibuse=="pysftp"):
1503 httplibuse = "ftp";
1504 pretmpfilename = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, httplibuse, buffersize, sleep, timeout);
1505 if(not pretmpfilename):
1506 return False;
with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
    tmpfilename = f.name;
    f.write(pretmpfilename['Content']);
# Restore the Last-Modified timestamp only after the file has been written and closed,
# so the write itself does not clobber the restored modification time.
try:
    os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())));
except AttributeError:
    try:
        os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
    except ValueError:
        pass;
except ValueError:
    pass;
returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': pretmpfilename.get('Contentsize'), 'FilesizeAlt': {'IEC': get_readable_size(pretmpfilename.get('Contentsize'), 2, "IEC"), 'SI': get_readable_size(pretmpfilename.get('Contentsize'), 2, "SI")}, 'Headers': pretmpfilename.get('Headers'), 'Version': pretmpfilename.get('Version'), 'Method': pretmpfilename.get('Method'), 'HeadersSent': pretmpfilename.get('HeadersSent'), 'URL': pretmpfilename.get('URL'), 'Code': pretmpfilename.get('Code'), 'Reason': pretmpfilename.get('Reason')};
1521 exec_time_end = time.time();
1522 log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to download file.");
1523 returnval.update({'Filesize': os.path.getsize(tmpfilename), 'FilesizeAlt': {'IEC': get_readable_size(os.path.getsize(tmpfilename), 2, "IEC"), 'SI': get_readable_size(os.path.getsize(tmpfilename), 2, "SI")}, 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)});
1524 return returnval;
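# Minimal sketch: download_from_url_file() runs download_from_url() and spools the returned
# content into a uniquely-suffixed NamedTemporaryFile (delete=False), so the caller is
# responsible for removing the file afterwards:
#
#   result = download_from_url_file("http://example.com/file.bin");
#   if(result):
#       print(result['Filename'], result['Filesize'], result['DownloadTimeReadable']);
#       os.remove(result['Filename']);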
1526 def download_from_url_to_file(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
1527 global geturls_download_sleep, havezstd, havebrotli, haverequests, havemechanize, havepycurl, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp;
1528 if(sleep<0):
1529 sleep = geturls_download_sleep;
1530 if(timeout<=0):
1531 timeout = 10;
1532 if(httplibuse=="urllib1" or httplibuse=="urllib2" or httplibuse=="request"):
1533 httplibuse = "urllib";
1534 if(httplibuse=="httplib1"):
1535 httplibuse = "httplib";
1536 if(not haverequests and httplibuse=="requests"):
1537 httplibuse = "urllib";
1538 if(not havehttpx and httplibuse=="httpx"):
1539 httplibuse = "urllib";
1540 if(not havehttpx and httplibuse=="httpx2"):
1541 httplibuse = "urllib";
1542 if(not havehttpcore and httplibuse=="httpcore"):
1543 httplibuse = "urllib";
1544 if(not havehttpcore and httplibuse=="httpcore2"):
1545 httplibuse = "urllib";
1546 if(not havemechanize and httplibuse=="mechanize"):
1547 httplibuse = "urllib";
1548 if(not havepycurl and httplibuse=="pycurl"):
1549 httplibuse = "urllib";
1550 if(not havepycurl and httplibuse=="pycurl2"):
1551 httplibuse = "urllib";
1552 if(havepycurl and httplibuse=="pycurl2" and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
1553 httplibuse = "pycurl";
1554 if(not havepycurl and httplibuse=="pycurl3"):
1555 httplibuse = "urllib";
1556 if(havepycurl and httplibuse=="pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
1557 httplibuse = "pycurl2";
1558 if(havepycurl and httplibuse=="pycurl3" and not hasattr(pycurl, "CURL_HTTP_VERSION_3_0") and not hasattr(pycurl, "CURL_HTTP_VERSION_2_0")):
1559 httplibuse = "pycurl";
1560 if(not havehttplib2 and httplibuse=="httplib2"):
1561 httplibuse = "httplib";
1562 if(not haveparamiko and httplibuse=="sftp"):
1563 httplibuse = "ftp";
1564 if(not havepysftp and httplibuse=="pysftp"):
1565 httplibuse = "ftp";
1566 if(not outfile=="-"):
1567 outpath = outpath.rstrip(os.path.sep);
1568 filepath = os.path.realpath(outpath+os.path.sep+outfile);
1569 if(not os.path.exists(outpath)):
1570 os.makedirs(outpath);
1571 if(os.path.exists(outpath) and os.path.isfile(outpath)):
1572 return False;
1573 if(os.path.exists(filepath) and os.path.isdir(filepath)):
1574 return False;
1575 pretmpfilename = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, httplibuse, ranges, buffersize[0], sleep, timeout);
1576 if(not pretmpfilename):
1577 return False;
1578 tmpfilename = pretmpfilename['Filename'];
1579 downloadsize = int(os.path.getsize(tmpfilename));
1580 fulldatasize = 0;
1581 log.info("Moving file "+tmpfilename+" to "+filepath);
1582 exec_time_start = time.time();
1583 shutil.move(tmpfilename, filepath);
1584 try:
1585 os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())));
1586 except AttributeError:
1587 try:
1588 os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
1589 except ValueError:
1590 pass;
1591 except ValueError:
1592 pass;
1593 exec_time_end = time.time();
1594 log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to move file.");
1595 if(os.path.exists(tmpfilename)):
1596 os.remove(tmpfilename);
1597 returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string(exec_time_start - exec_time_end), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': pretmpfilename['Method'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code'], 'Reason': pretmpfilename['Reason']};
1598 if(outfile=="-"):
1599 pretmpfilename = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, httplibuse, ranges, buffersize[0], sleep, timeout);
1600 tmpfilename = pretmpfilename['Filename'];
1601 downloadsize = int(os.path.getsize(tmpfilename));
1602 fulldatasize = 0;
1603 prevdownsize = 0;
1604 exec_time_start = time.time();
1605 with open(tmpfilename, 'rb') as ft:
1606 f = BytesIO();
1607 while True:
1608 databytes = ft.read(buffersize[1]);
1609 if not databytes: break;
1610 datasize = len(databytes);
1611 fulldatasize = datasize + fulldatasize;
1612 percentage = "";
1613 if(downloadsize>0):
1614 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
1615 downloaddiff = fulldatasize - prevdownsize;
1616 log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
1617 prevdownsize = fulldatasize;
1618 f.write(databytes);
1619 f.seek(0);
1620 fdata = f.getvalue();
1621 f.close();
1622 ft.close();
1623 os.remove(tmpfilename);
1624 exec_time_end = time.time();
1625 log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to copy file.");
1626 returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string(exec_time_start - exec_time_end), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': pretmpfilename['Method'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code'], 'Reason': pretmpfilename['Reason']};
1627 return returnval;
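# Minimal sketch (placeholder URL and paths): with a real outfile the temporary download is
# moved into outpath, while outfile="-" returns the content in memory instead:
#
#   saved = download_from_url_to_file("http://example.com/file.bin", outfile="file.bin", outpath="/tmp");
#   inmem = download_from_url_to_file("http://example.com/file.bin", outfile="-");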
1629 def download_from_url_with_urllib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
1630 returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib", buffersize, sleep, timeout);
1631 return returnval;
1633 def download_from_url_with_request(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
1634 returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib", buffersize, sleep, timeout);
1635 return returnval;
1637 def download_from_url_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
1638 returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "request3", buffersize, sleep, timeout);
1639 return returnval;
1641 def download_from_url_with_httplib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
1642 returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httplib", buffersize, sleep, timeout);
1643 return returnval;
1645 def download_from_url_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
1646 returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httplib2", buffersize, sleep, timeout);
1647 return returnval;
1649 def download_from_url_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
1650 returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib3", buffersize, sleep, timeout);
1651 return returnval;
1653 def download_from_url_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
1654 returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "requests", buffersize, sleep, timeout);
1655 return returnval;
1657 def download_from_url_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
1658 returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpx", buffersize, sleep, timeout);
1659 return returnval;
1661 def download_from_url_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
1662 returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpx2", buffersize, sleep, timeout);
1663 return returnval;
1665 def download_from_url_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
1666 returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpcore", buffersize, sleep, timeout);
1667 return returnval;
1669 def download_from_url_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
1670 returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpcore2", buffersize, sleep, timeout);
1671 return returnval;
1673 def download_from_url_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
1674 returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "mechanize", buffersize, sleep, timeout);
1675 return returnval;
1677 def download_from_url_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
1678 returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl", buffersize, sleep, timeout);
1679 return returnval;
1681 def download_from_url_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
1682 returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl2", buffersize, sleep, timeout);
1683 return returnval;
1685 def download_from_url_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
1686 returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl3", buffersize, sleep, timeout);
1687 return returnval;
1689 def download_from_url_with_ftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
1690 returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "ftp", buffersize, sleep, timeout);
1691 return returnval;
1693 def download_from_url_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
1694 returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "sftp", buffersize, sleep, timeout);
1695 return returnval;
1697 def download_from_url_with_pysftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
1698 returnval = download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pysftp", buffersize, sleep, timeout);
1699 return returnval;
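# The download_from_url_with_*() helpers above are thin wrappers that pin the httplibuse
# backend: download_from_url_with_requests(url) is equivalent to calling download_from_url()
# with httplibuse="requests". Backend availability is still checked, with urllib as the fallback.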
1701 def download_from_url_file_with_urllib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1702 returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib", ranges, buffersize, sleep, timeout);
1703 return returnval;
1705 def download_from_url_file_with_request(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1706 returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib", ranges, buffersize, sleep, timeout);
1707 return returnval;
1709 def download_from_url_file_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1710 returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "request3", ranges, buffersize, sleep, timeout);
1711 return returnval;
1713 def download_from_url_file_with_httplib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1714 returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httplib", ranges, buffersize, sleep, timeout);
1715 return returnval;
1717 def download_from_url_file_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1718 returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httplib2", ranges, buffersize, sleep, timeout);
1719 return returnval;
1721 def download_from_url_file_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1722 returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib3", ranges, buffersize, sleep, timeout);
1723 return returnval;
1725 def download_from_url_file_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1726 returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "requests", ranges, buffersize, sleep, timeout);
1727 return returnval;
1729 def download_from_url_file_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1730 returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpx", ranges, buffersize, sleep, timeout);
1731 return returnval;
1733 def download_from_url_file_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1734 returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpx2", ranges, buffersize, sleep, timeout);
1735 return returnval;
1737 def download_from_url_file_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1738 returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpcore", ranges, buffersize, sleep, timeout);
1739 return returnval;
1741 def download_from_url_file_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1742 returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpcore2", ranges, buffersize, sleep, timeout);
1743 return returnval;
1745 def download_from_url_file_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1746 returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "mechanize", ranges, buffersize, sleep, timeout);
1747 return returnval;
1749 def download_from_url_file_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1750 returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl", ranges, buffersize, sleep, timeout);
1751 return returnval;
1753 def download_from_url_file_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1754 returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl2", ranges, buffersize, sleep, timeout);
1755 return returnval;
1757 def download_from_url_file_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1758 returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl3", ranges, buffersize, sleep, timeout);
1759 return returnval;
1761 def download_from_url_file_with_ftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1762 returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "ftp", ranges, buffersize, sleep, timeout);
1763 return returnval;
1765 def download_from_url_file_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1766 returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "sftp", ranges, buffersize, sleep, timeout);
1767 return returnval;
1769 def download_from_url_file_with_pysftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1770 returnval = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pysftp", ranges, buffersize, sleep, timeout);
1771 return returnval;
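# Likewise, the download_from_url_file_with_*() helpers only pin the backend name and forward
# ranges, buffersize, sleep and timeout straight through to download_from_url_file().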
1773 def download_from_url_to_file_with_urllib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
1774 returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib", outfile, outpath, ranges, buffersize, sleep, timeout);
1775 return returnval;
1777 def download_from_url_to_file_with_request(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
1778 returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "request", outfile, outpath, ranges, buffersize, sleep, timeout);
1779 return returnval;
1781 def download_from_url_to_file_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "request3", outfile, outpath, ranges, buffersize, sleep, timeout);
1783 return returnval;
1785 def download_from_url_to_file_with_httplib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
1786 returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httplib", outfile, outpath, ranges, buffersize, sleep, timeout);
1787 return returnval;
1789 def download_from_url_to_file_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
1790 returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httplib2", outfile, outpath, ranges, buffersize, sleep, timeout);
1791 return returnval;
1793 def download_from_url_to_file_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
1794 returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib3", outfile, outpath, ranges, buffersize, sleep, timeout);
1795 return returnval;
1797 def download_from_url_to_file_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
1798 returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "requests", outfile, outpath, ranges, buffersize, sleep, timeout);
1799 return returnval;
1801 def download_from_url_to_file_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
1802 returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpx", outfile, outpath, ranges, buffersize, sleep, timeout);
1803 return returnval;
1805 def download_from_url_to_file_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
1806 returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpx2", outfile, outpath, ranges, buffersize, sleep, timeout);
1807 return returnval;
1809 def download_from_url_to_file_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
1810 returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpcore", outfile, outpath, ranges, buffersize, sleep, timeout);
1811 return returnval;
1813 def download_from_url_to_file_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
1814 returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpcore2", outfile, outpath, ranges, buffersize, sleep, timeout);
1815 return returnval;
1817 def download_from_url_to_file_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
1818 returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "mechanize", outfile, outpath, ranges, buffersize, sleep, timeout);
1819 return returnval;
1821 def download_from_url_to_file_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
1822 returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl", outfile, outpath, ranges, buffersize, sleep, timeout);
1823 return returnval;
1825 def download_from_url_to_file_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
1826 returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl2", outfile, outpath, ranges, buffersize, sleep, timeout);
1827 return returnval;
1829 def download_from_url_to_file_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
1830 returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl3", outfile, outpath, ranges, buffersize, sleep, timeout);
1831 return returnval;
1833 def download_from_url_to_file_with_ftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
1834 returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "ftp", outfile, outpath, ranges, buffersize, sleep, timeout);
1835 return returnval;
1837 def download_from_url_to_file_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
1838 returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "sftp", outfile, outpath, ranges, buffersize, sleep, timeout);
1839 return returnval;
1841 def download_from_url_to_file_with_pysftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
1842 returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pysftp", outfile, outpath, ranges, buffersize, sleep, timeout);
1843 return returnval;
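# The download_from_url_to_file_with_*() helpers complete the pattern for the to-file variant.
# Note that buffersize is a two-element list here: buffersize[0] is the download chunk size and
# buffersize[1] is the chunk size used when copying the finished file back out.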
1845 def download_file_from_ftp_file(url):
1846 urlparts = urlparse.urlparse(url);
1847 file_name = os.path.basename(urlparts.path);
1848 file_dir = os.path.dirname(urlparts.path);
1849 if(urlparts.username is not None):
1850 ftp_username = urlparts.username;
1851 else:
1852 ftp_username = "anonymous";
1853 if(urlparts.password is not None):
1854 ftp_password = urlparts.password;
elif(urlparts.password is None and ftp_username=="anonymous"):
1856 ftp_password = "anonymous";
1857 else:
1858 ftp_password = "";
1859 if(urlparts.scheme=="ftp"):
1860 ftp = FTP();
1861 elif(urlparts.scheme=="ftps"):
1862 ftp = FTP_TLS();
1863 else:
1864 return False;
1865 if(urlparts.scheme=="http" or urlparts.scheme=="https"):
1866 return False;
1867 ftp_port = urlparts.port;
1868 if(urlparts.port is None):
1869 ftp_port = 21;
1870 try:
1871 ftp.connect(urlparts.hostname, ftp_port);
except socket.gaierror:
    log.info("Error With URL "+url);
    return False;
except socket.timeout:
    log.info("Error With URL "+url);
    return False;
ftp.login(ftp_username, ftp_password);
1879 if(urlparts.scheme=="ftps"):
1880 ftp.prot_p();
1881 ftpfile = BytesIO();
1882 ftp.retrbinary("RETR "+urlparts.path, ftpfile.write);
1883 #ftp.storbinary("STOR "+urlparts.path, ftpfile.write);
1884 ftp.close();
1885 ftpfile.seek(0, 0);
1886 return ftpfile;
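# Minimal sketch (host and path are placeholders): credentials are read from the URL itself,
# defaulting to anonymous FTP when none are supplied:
#
#   ftpfile = download_file_from_ftp_file("ftp://user:password@ftp.example.com/pub/file.bin");
#   if(ftpfile):
#       data = ftpfile.read();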
1888 def download_file_from_ftp_string(url):
1889 ftpfile = download_file_from_ftp_file(url);
1890 return ftpfile.read();
1892 def upload_file_to_ftp_file(ftpfile, url):
1893 urlparts = urlparse.urlparse(url);
1894 file_name = os.path.basename(urlparts.path);
1895 file_dir = os.path.dirname(urlparts.path);
1896 if(urlparts.username is not None):
1897 ftp_username = urlparts.username;
1898 else:
1899 ftp_username = "anonymous";
1900 if(urlparts.password is not None):
1901 ftp_password = urlparts.password;
elif(urlparts.password is None and ftp_username=="anonymous"):
1903 ftp_password = "anonymous";
1904 else:
1905 ftp_password = "";
1906 if(urlparts.scheme=="ftp"):
1907 ftp = FTP();
1908 elif(urlparts.scheme=="ftps"):
1909 ftp = FTP_TLS();
1910 else:
1911 return False;
1912 if(urlparts.scheme=="http" or urlparts.scheme=="https"):
1913 return False;
1914 ftp_port = urlparts.port;
1915 if(urlparts.port is None):
1916 ftp_port = 21;
1917 try:
1918 ftp.connect(urlparts.hostname, ftp_port);
except socket.gaierror:
    log.info("Error With URL "+url);
    return False;
except socket.timeout:
    log.info("Error With URL "+url);
    return False;
ftp.login(ftp_username, ftp_password);
1926 if(urlparts.scheme=="ftps"):
1927 ftp.prot_p();
1928 ftp.storbinary("STOR "+urlparts.path, ftpfile);
1929 ftp.close();
1930 ftpfile.seek(0, 0);
1931 return ftpfile;
1933 def upload_file_to_ftp_string(ftpstring, url):
1934 ftpfileo = BytesIO(ftpstring);
1935 ftpfile = upload_file_to_ftp_file(ftpfileo, url);
1936 ftpfileo.close();
1937 return ftpfile;
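# upload_file_to_ftp_file() takes a seekable file-like object and returns it rewound on success;
# upload_file_to_ftp_string() is the bytes convenience wrapper around it:
#
#   upload_file_to_ftp_string(b"hello", "ftp://user:password@ftp.example.com/pub/hello.txt");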
1939 if(haveparamiko):
1940 def download_file_from_sftp_file(url):
1941 urlparts = urlparse.urlparse(url);
1942 file_name = os.path.basename(urlparts.path);
1943 file_dir = os.path.dirname(urlparts.path);
1944 if(urlparts.scheme=="http" or urlparts.scheme=="https"):
1945 return False;
1946 sftp_port = urlparts.port;
1947 if(urlparts.port is None):
1948 sftp_port = 22;
1949 else:
1950 sftp_port = urlparts.port;
1951 if(urlparts.username is not None):
1952 sftp_username = urlparts.username;
1953 else:
1954 sftp_username = "anonymous";
1955 if(urlparts.password is not None):
1956 sftp_password = urlparts.password;
elif(urlparts.password is None and sftp_username=="anonymous"):
1958 sftp_password = "anonymous";
1959 else:
1960 sftp_password = "";
1961 if(urlparts.scheme!="sftp"):
1962 return False;
1963 ssh = paramiko.SSHClient();
1964 ssh.load_system_host_keys();
1965 ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy());
try:
    ssh.connect(urlparts.hostname, port=sftp_port, username=sftp_username, password=sftp_password);
except paramiko.ssh_exception.SSHException:
    return False;
except socket.gaierror:
    log.info("Error With URL "+url);
    return False;
except socket.timeout:
    log.info("Error With URL "+url);
    return False;
1976 sftp = ssh.open_sftp();
1977 sftpfile = BytesIO();
1978 sftp.getfo(urlparts.path, sftpfile);
1979 sftp.close();
1980 ssh.close();
1981 sftpfile.seek(0, 0);
1982 return sftpfile;
1983 else:
1984 def download_file_from_sftp_file(url):
1985 return False;
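# Minimal sketch (placeholder URL): paramiko's SFTPClient.getfo() copies the remote file into a
# BytesIO buffer; when paramiko is not installed, the stub above simply returns False:
#
#   sftpfile = download_file_from_sftp_file("sftp://user:password@sftp.example.com/path/file.bin");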
1987 if(haveparamiko):
1988 def download_file_from_sftp_string(url):
1989 sftpfile = download_file_from_sftp_file(url);
1990 return sftpfile.read();
1991 else:
def download_file_from_sftp_string(url):
    return False;
1995 if(haveparamiko):
1996 def upload_file_to_sftp_file(sftpfile, url):
1997 urlparts = urlparse.urlparse(url);
1998 file_name = os.path.basename(urlparts.path);
1999 file_dir = os.path.dirname(urlparts.path);
2000 sftp_port = urlparts.port;
2001 if(urlparts.scheme=="http" or urlparts.scheme=="https"):
2002 return False;
2003 if(urlparts.port is None):
2004 sftp_port = 22;
2005 else:
2006 sftp_port = urlparts.port;
2007 if(urlparts.username is not None):
2008 sftp_username = urlparts.username;
2009 else:
2010 sftp_username = "anonymous";
2011 if(urlparts.password is not None):
2012 sftp_password = urlparts.password;
elif(urlparts.password is None and sftp_username=="anonymous"):
2014 sftp_password = "anonymous";
2015 else:
2016 sftp_password = "";
2017 if(urlparts.scheme!="sftp"):
2018 return False;
2019 ssh = paramiko.SSHClient();
2020 ssh.load_system_host_keys();
2021 ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy());
try:
    ssh.connect(urlparts.hostname, port=sftp_port, username=sftp_username, password=sftp_password);
except paramiko.ssh_exception.SSHException:
    return False;
except socket.gaierror:
    log.info("Error With URL "+url);
    return False;
except socket.timeout:
    log.info("Error With URL "+url);
    return False;
2032 sftp = ssh.open_sftp();
2033 sftp.putfo(sftpfile, urlparts.path);
2034 sftp.close();
2035 ssh.close();
2036 sftpfile.seek(0, 0);
2037 return sftpfile;
2038 else:
2039 def upload_file_to_sftp_file(sftpfile, url):
2040 return False;
2042 if(haveparamiko):
2043 def upload_file_to_sftp_string(sftpstring, url):
2044 sftpfileo = BytesIO(sftpstring);
sftpfile = upload_file_to_sftp_file(sftpfileo, url);
2046 sftpfileo.close();
2047 return sftpfile;
2048 else:
def upload_file_to_sftp_string(sftpstring, url):
2050 return False;
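# Security note: set_missing_host_key_policy(paramiko.AutoAddPolicy()) accepts unknown host keys
# automatically. That keeps scripted transfers friction-free, but it also forgoes host-key
# verification, so these helpers should only be pointed at hosts you already trust.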
2053 if(havepysftp):
2054 def download_file_from_pysftp_file(url):
2055 urlparts = urlparse.urlparse(url);
2056 file_name = os.path.basename(urlparts.path);
2057 file_dir = os.path.dirname(urlparts.path);
2058 if(urlparts.scheme=="http" or urlparts.scheme=="https"):
2059 return False;
2060 sftp_port = urlparts.port;
2061 if(urlparts.port is None):
2062 sftp_port = 22;
2063 else:
2064 sftp_port = urlparts.port;
2065 if(urlparts.username is not None):
2066 sftp_username = urlparts.username;
2067 else:
2068 sftp_username = "anonymous";
2069 if(urlparts.password is not None):
2070 sftp_password = urlparts.password;
elif(urlparts.password is None and sftp_username=="anonymous"):
2072 sftp_password = "anonymous";
2073 else:
2074 sftp_password = "";
2075 if(urlparts.scheme!="sftp"):
2076 return False;
try:
    sftp = pysftp.Connection(urlparts.hostname, port=sftp_port, username=sftp_username, password=sftp_password);
except paramiko.ssh_exception.SSHException:
    return False;
except socket.gaierror:
    log.info("Error With URL "+url);
    return False;
except socket.timeout:
    log.info("Error With URL "+url);
    return False;
sftpfile = BytesIO();
sftp.getfo(urlparts.path, sftpfile);
sftp.close();
sftpfile.seek(0, 0);
return sftpfile;
2094 else:
2095 def download_file_from_pysftp_file(url):
2096 return False;
2098 if(havepysftp):
2099 def download_file_from_pysftp_string(url):
2100 sftpfile = download_file_from_pysftp_file(url);
2101 return sftpfile.read();
2102 else:
def download_file_from_pysftp_string(url):
    return False;
2106 if(havepysftp):
2107 def upload_file_to_pysftp_file(sftpfile, url):
2108 urlparts = urlparse.urlparse(url);
2109 file_name = os.path.basename(urlparts.path);
2110 file_dir = os.path.dirname(urlparts.path);
2111 sftp_port = urlparts.port;
2112 if(urlparts.scheme=="http" or urlparts.scheme=="https"):
2113 return False;
2114 if(urlparts.port is None):
2115 sftp_port = 22;
2116 else:
2117 sftp_port = urlparts.port;
2118 if(urlparts.username is not None):
2119 sftp_username = urlparts.username;
2120 else:
2121 sftp_username = "anonymous";
2122 if(urlparts.password is not None):
2123 sftp_password = urlparts.password;
elif(urlparts.password is None and sftp_username=="anonymous"):
2125 sftp_password = "anonymous";
2126 else:
2127 sftp_password = "";
2128 if(urlparts.scheme!="sftp"):
2129 return False;
try:
    sftp = pysftp.Connection(urlparts.hostname, port=sftp_port, username=sftp_username, password=sftp_password);
except paramiko.ssh_exception.SSHException:
    return False;
except socket.gaierror:
    log.info("Error With URL "+url);
    return False;
except socket.timeout:
    log.info("Error With URL "+url);
    return False;
sftp.putfo(sftpfile, urlparts.path);
sftp.close();
sftpfile.seek(0, 0);
return sftpfile;
2146 else:
2147 def upload_file_to_pysftp_file(sftpfile, url):
2148 return False;
2150 if(havepysftp):
2151 def upload_file_to_pysftp_string(sftpstring, url):
2152 sftpfileo = BytesIO(sftpstring);
sftpfile = upload_file_to_pysftp_file(sftpfileo, url);
2154 sftpfileo.close();
2155 return sftpfile;
2156 else:
def upload_file_to_pysftp_string(sftpstring, url):
2158 return False;
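# The pysftp variants mirror the paramiko helpers above, but pysftp.Connection() manages the SSH
# transport itself, so the connection object is used directly for getfo()/putfo() and close().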