Add files via upload
[PyWWW-Get.git] / pywwwget.py
blob635663ad2438e5c38e9d7eca8ec0076c12c04502
1 #!/usr/bin/env python
3 '''
4 This program is free software; you can redistribute it and/or modify
5 it under the terms of the Revised BSD License.
7 This program is distributed in the hope that it will be useful,
8 but WITHOUT ANY WARRANTY; without even the implied warranty of
9 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 Revised BSD License for more details.
12 Copyright 2016-2023 Game Maker 2k - https://github.com/GameMaker2k
13 Copyright 2016-2023 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
15 $FileInfo: pywwwget.py - Last Update: 9/30/2023 Ver. 1.7.0 RC 1 - Author: cooldude2k $
16 '''
18 from __future__ import division, absolute_import, print_function;
19 import re, os, sys, hashlib, shutil, platform, tempfile, urllib, gzip, time, argparse, cgi, subprocess, socket, email.utils, datetime, time;
20 import logging as log;
21 from ftplib import FTP, FTP_TLS;
22 from base64 import b64encode;
# Optional third-party backends.  Each import is probed inside a guard and a
# have* boolean records whether the library is available; the rest of the
# module consults these flags before selecting a backend.
haverequests = False;
try:
    import requests;
    haverequests = True;
except ImportError:
    haverequests = False;
havemechanize = False;
try:
    import mechanize;
    havemechanize = True;
except ImportError:
    havemechanize = False;
havepycurl = False;
try:
    import pycurl;
    havepycurl = True;
except ImportError:
    havepycurl = False;
haveparamiko = False;
try:
    import paramiko;
    haveparamiko = True;
except ImportError:
    haveparamiko = False;
havepysftp = False;
try:
    import pysftp;
    havepysftp = True;
except ImportError:
    havepysftp = False;
haveurllib3 = False;
try:
    import urllib3;
    haveurllib3 = True;
except ImportError:
    haveurllib3 = False;
havehttplib2 = False;
try:
    from httplib2 import HTTPConnectionWithTimeout, HTTPSConnectionWithTimeout;
    havehttplib2 = True;
except ImportError:
    havehttplib2 = False;
havehttpx = False;
try:
    import httpx;
    havehttpx = True;
except ImportError:
    havehttpx = False;
havehttpcore = False;
try:
    import httpcore;
    havehttpcore = True;
except ImportError:
    havehttpcore = False;
# Optional compression codecs for Accept-Encoding / response decoding.
havebrotli = False;
try:
    import brotli;
    havebrotli = True;
except ImportError:
    havebrotli = False;
havezstd = False;
try:
    import zstandard;
    havezstd = True;
except ImportError:
    havezstd = False;
# Version-dependent imports: bind the same set of names (StringIO/BytesIO,
# urllib pieces, cookielib, HTTP(S)Connection) on both Python 2 and 3.
# Idioms from http://python-future.org/compatible_idioms.html
if(sys.version[0]=="2"):
    try:
        from io import StringIO, BytesIO;
    except ImportError:
        # Fall back to the C then pure-Python StringIO implementations.
        try:
            from cStringIO import StringIO;
            from cStringIO import StringIO as BytesIO;
        except ImportError:
            from StringIO import StringIO;
            from StringIO import StringIO as BytesIO;
    from urlparse import urlparse, urlunparse, urlsplit, urlunsplit, urljoin;
    from urllib import urlencode;
    from urllib import urlopen as urlopenalt;
    from urllib2 import urlopen, Request, install_opener, HTTPError, URLError, build_opener, HTTPCookieProcessor;
    import urlparse, cookielib;
    from httplib import HTTPConnection, HTTPSConnection;
if(sys.version[0]>="3"):
    from io import StringIO, BytesIO;
    from urllib.parse import urlparse, urlunparse, urlsplit, urlunsplit, urljoin, urlencode;
    from urllib.request import urlopen, Request, install_opener, build_opener, HTTPCookieProcessor;
    from urllib.error import HTTPError, URLError;
    # Aliased so py2-style "urlparse.urlsplit" / "cookielib.CookieJar" calls work.
    import urllib.parse as urlparse;
    import http.cookiejar as cookielib;
    from http.client import HTTPConnection, HTTPSConnection;
# Program identity and version metadata.
__program_name__ = "PyWWW-Get";
__program_alt_name__ = "PyWWWGet";
__program_small_name__ = "wwwget";
__project__ = __program_name__;
__project_url__ = "https://github.com/GameMaker2k/PyWWW-Get";
# (major, minor, micro, release-tag-or-None, rc-number-or-None)
__version_info__ = (1, 7, 0, "RC 1", 1);
__version_date_info__ = (2023, 9, 30, "RC 1", 1);
__version_date__ = "{0}.{1:02d}.{2:02d}".format(__version_date_info__[0], __version_date_info__[1], __version_date_info__[2]);
__revision__ = __version_info__[3];
__revision_id__ = "$Id$";
# Append the RC number to the dated version when one is set.
if(__version_info__[4] is not None):
    __version_date_plusrc__ = __version_date__+"-"+str(__version_date_info__[4]);
else:
    __version_date_plusrc__ = __version_date__;
# Append the release tag (e.g. "RC 1") to the dotted version when one is set.
if(__version_info__[3] is not None):
    __version__ = "{0}.{1}.{2} {3}".format(__version_info__[0], __version_info__[1], __version_info__[2], __version_info__[3]);
else:
    __version__ = "{0}.{1}.{2}".format(__version_info__[0], __version_info__[1], __version_info__[2]);
# Temp-file naming, e.g. "py3wwwget1-<random>-" in the system temp dir.
tmpfileprefix = "py"+str(sys.version_info[0])+__program_small_name__+str(__version_info__[0])+"-";
tmpfilesuffix = "-";
pytempdir = tempfile.gettempdir();
# Interpreter bitness ("32" or "64"), used for the SEC-CH-UA-BITNESS hint.
# BUGFIX: platform.architecture() returns a (bits, linkage) tuple such as
# ("64bit", "ELF"); the original compared the tuple itself against strings,
# so every comparison failed and PyBitness was always "32".  Compare the
# first tuple element instead.
PyBitness = platform.architecture()[0];
if(PyBitness=="32bit" or PyBitness=="32"):
    PyBitness = "32";
elif(PyBitness=="64bit" or PyBitness=="64"):
    PyBitness = "64";
else:
    # Unknown architecture string: keep the original conservative default.
    PyBitness = "32";
# Advertise Brotli in Accept-Encoding only when the brotli module imported.
compression_supported = "gzip, deflate, br" if havebrotli else "gzip, deflate";
# Shared cookie jar used by the urllib-based openers.
geturls_cj = cookielib.CookieJar();
# User-Agent strings and Sec-CH-UA client-hint fragments per Windows release.
# BUGFIX: every *_ua_addon dict (and the pywwwget header dicts below) listed
# 'SEC-CH-UA-PLATFORM' twice, so the version number silently overwrote the
# platform name; the second occurrence is now the intended
# 'SEC-CH-UA-PLATFORM-VERSION' key.
windowsNT4_ua_string = "Windows NT 4.0";
windowsNT4_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "32", 'SEC-CH-UA-PLATFORM-VERSION': "4.0.0"};
windows2k_ua_string = "Windows NT 5.0";
windows2k_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "32", 'SEC-CH-UA-PLATFORM-VERSION': "5.0.0"};
windowsXP_ua_string = "Windows NT 5.1";
windowsXP_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "32", 'SEC-CH-UA-PLATFORM-VERSION': "5.1.0"};
windowsXP64_ua_string = "Windows NT 5.2; Win64; x64";
# NOTE(review): "5.1.0" below looks like a typo for "5.2.0" (the UA string
# says NT 5.2) -- kept as-is to preserve the emitted header; confirm upstream.
windowsXP64_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "5.1.0"};
windows7_ua_string = "Windows NT 6.1; Win64; x64";
windows7_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "6.1.0"};
windows8_ua_string = "Windows NT 6.2; Win64; x64";
windows8_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "6.2.0"};
windows81_ua_string = "Windows NT 6.3; Win64; x64";
windows81_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "6.3.0"};
windows10_ua_string = "Windows NT 10.0; Win64; x64";
windows10_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "10.0.0"};
windows11_ua_string = "Windows NT 11.0; Win64; x64";
windows11_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "11.0.0"};
# Canned User-Agent strings for common browsers (all pretending Windows 7).
geturls_ua_firefox_windows7 = "Mozilla/5.0 ("+windows7_ua_string+"; rv:109.0) Gecko/20100101 Firefox/117.0";
geturls_ua_seamonkey_windows7 = "Mozilla/5.0 ("+windows7_ua_string+"; rv:91.0) Gecko/20100101 Firefox/91.0 SeaMonkey/2.53.17";
geturls_ua_chrome_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36";
geturls_ua_chromium_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chromium/117.0.0.0 Chrome/117.0.0.0 Safari/537.36";
geturls_ua_palemoon_windows7 = "Mozilla/5.0 ("+windows7_ua_string+"; rv:102.0) Gecko/20100101 Goanna/6.3 Firefox/102.0 PaleMoon/32.4.0.1";
geturls_ua_opera_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36 OPR/102.0.0.0";
geturls_ua_vivaldi_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36 Vivaldi/6.2.3105.48";
geturls_ua_internet_explorer_windows7 = "Mozilla/5.0 ("+windows7_ua_string+"; Trident/7.0; rv:11.0) like Gecko";
geturls_ua_microsoft_edge_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36 Edg/117.0.2045.31";
# Honest self-identifying User-Agent strings for this program.
geturls_ua_pywwwget_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prourl})".format(proname=__project__, prover=__version__, prourl=__project_url__);
if(platform.python_implementation()!=""):
    py_implementation = platform.python_implementation();
if(platform.python_implementation()==""):
    py_implementation = "Python";
geturls_ua_pywwwget_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system()+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__);
geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)";
geturls_ua_googlebot_google_old = "Googlebot/2.1 (+http://www.google.com/bot.html)";
# Default User-Agent used when the caller does not pick one.
geturls_ua = geturls_ua_firefox_windows7;
geturls_headers_firefox_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_firefox_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
geturls_headers_seamonkey_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_seamonkey_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
geturls_headers_chrome_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_chrome_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Google Chrome\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Chromium\";v=\"117\"", 'SEC-CH-UA-FULL-VERSION': "117.0.5938.63"};
geturls_headers_chrome_windows7.update(windows7_ua_addon);
geturls_headers_chromium_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_chromium_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Chromium\";v=\"117\", \"Not;A=Brand\";v=\"24\"", 'SEC-CH-UA-FULL-VERSION': "117.0.5938.63"};
geturls_headers_chromium_windows7.update(windows7_ua_addon);
geturls_headers_palemoon_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_palemoon_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
geturls_headers_opera_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_opera_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Chromium\";v=\"116\", \"Not;A=Brand\";v=\"8\", \"Opera\";v=\"102\"", 'SEC-CH-UA-FULL-VERSION': "102.0.4880.56"};
geturls_headers_opera_windows7.update(windows7_ua_addon);
geturls_headers_vivaldi_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_vivaldi_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Google Chrome\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Vivaldi\";v=\"6.2\"", 'SEC-CH-UA-FULL-VERSION': "6.2.3105.48"};
geturls_headers_vivaldi_windows7.update(windows7_ua_addon);
geturls_headers_internet_explorer_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_internet_explorer_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
geturls_headers_microsoft_edge_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_microsoft_edge_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Microsoft Edge\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Chromium\";v=\"117\"", 'SEC-CH-UA-FULL-VERSION': "117.0.2045.31"}
geturls_headers_microsoft_edge_windows7.update(windows7_ua_addon);
geturls_headers_pywwwget_python = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_pywwwget_python, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\""+__project__+"\";v=\""+str(__version__)+"\", \"Not;A=Brand\";v=\"8\", \""+py_implementation+"\";v=\""+str(platform.release())+"\"", 'SEC-CH-UA-FULL-VERSION': str(__version__), 'SEC-CH-UA-PLATFORM': ""+py_implementation+"", 'SEC-CH-UA-ARCH': ""+platform.machine()+"", 'SEC-CH-UA-PLATFORM-VERSION': str(__version__), 'SEC-CH-UA-BITNESS': str(PyBitness)};
geturls_headers_pywwwget_python_alt = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_pywwwget_python_alt, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\""+__project__+"\";v=\""+str(__version__)+"\", \"Not;A=Brand\";v=\"8\", \""+py_implementation+"\";v=\""+str(platform.release())+"\"", 'SEC-CH-UA-FULL-VERSION': str(__version__), 'SEC-CH-UA-PLATFORM': ""+py_implementation+"", 'SEC-CH-UA-ARCH': ""+platform.machine()+"", 'SEC-CH-UA-PLATFORM-VERSION': str(__version__), 'SEC-CH-UA-BITNESS': str(PyBitness)};
geturls_headers_googlebot_google = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_googlebot_google, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
geturls_headers_googlebot_google_old = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_googlebot_google_old, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
# Default header set and inter-download sleep (seconds).
geturls_headers = geturls_headers_firefox_windows7;
geturls_download_sleep = 0;
def verbose_printout(dbgtxt, outtype="log", dbgenable=True, dgblevel=20):
    """Emit dbgtxt to the selected sink.

    outtype: "print" writes to stdout; "log", "warning", "error",
    "critical", "exception" and "debug" use the matching logging call;
    "logalt" logs at the numeric level dgblevel.
    Returns True when the message was emitted (or dbgenable is False),
    False for an unrecognized outtype.
    """
    # BUGFIX: the module imports the logging package as "log"
    # ("import logging as log"), but this function originally called
    # "logging.*", raising NameError on every logging path.
    if(not dbgenable):
        # Debug output disabled: silently succeed.
        return True;
    if(outtype=="print"):
        print(dbgtxt);
        return True;
    elif(outtype=="log"):
        log.info(dbgtxt);
        return True;
    elif(outtype=="warning"):
        log.warning(dbgtxt);
        return True;
    elif(outtype=="error"):
        log.error(dbgtxt);
        return True;
    elif(outtype=="critical"):
        log.critical(dbgtxt);
        return True;
    elif(outtype=="exception"):
        log.exception(dbgtxt);
        return True;
    elif(outtype=="logalt"):
        log.log(dgblevel, dbgtxt);
        return True;
    elif(outtype=="debug"):
        log.debug(dbgtxt);
        return True;
    else:
        return False;
def verbose_printout_return(dbgtxt, outtype="log", dbgenable=True, dgblevel=20):
    """Like verbose_printout(), but return dbgtxt on success, False on failure."""
    if(verbose_printout(dbgtxt, outtype, dbgenable, dgblevel)):
        return dbgtxt;
    return False;
def add_url_param(url, **params):
    """Return url with **params merged into its query string.

    Existing parameters with the same name are overwritten; other URL
    components are preserved.
    """
    query_index = 3;  # position of the query component in urlsplit() results
    parts = list(urlparse.urlsplit(url));
    # BUGFIX: cgi.parse_qsl was deprecated for years and removed together
    # with the cgi module in Python 3.13; urlparse.parse_qsl is the
    # documented replacement and exists in both py2/py3 import branches.
    # (Use urlparse.parse_qs instead for list-valued parameters.)
    query = dict(urlparse.parse_qsl(parts[query_index]));
    query.update(params);
    parts[query_index] = urlencode(query);
    return urlparse.urlunsplit(parts);
# Append the script's own directory and the current working directory to
# PATH so helpers launched by name (see which_exec below) can be found there.
os.environ["PATH"] = os.environ["PATH"] + os.pathsep + os.path.dirname(os.path.realpath(__file__)) + os.pathsep + os.getcwd();
def which_exec(execfile):
    """Search os.environ["PATH"] for execfile and return its full path.

    Returns None when no matching file exists on the search path.
    """
    # BUGFIX: split on os.pathsep (":" on POSIX, ";" on Windows) instead of
    # a hard-coded ":", and join with os.path.join instead of "/", so the
    # lookup also works on Windows.
    for path in os.environ["PATH"].split(os.pathsep):
        candidate = os.path.join(path, execfile);
        if os.path.exists(candidate):
            return candidate;
    return None;
def listize(varlist):
    """Build 1-based forward/reverse index maps for varlist.

    Returns {1/'reg': {index: value}, 2/'rev': {value: index}}.
    """
    forward = {};
    backward = {};
    for position, value in enumerate(varlist, start=1):
        forward[position] = value;
        backward[value] = position;
    return {1: forward, 2: backward, 'reg': forward, 'rev': backward};
def twolistize(varlist):
    """Build 1-based index maps for a list of (name, desc) pairs.

    Both fields are whitespace-stripped.  Returns {1/'name': ..., 2/'desc': ...},
    where each side has the same {1/'reg', 2/'rev'} layout as listize().
    """
    names_fwd = {};
    names_rev = {};
    descs_fwd = {};
    descs_rev = {};
    for position, pair in enumerate(varlist, start=1):
        name = pair[0].strip();
        desc = pair[1].strip();
        names_fwd[position] = name;
        names_rev[name] = position;
        descs_fwd[position] = desc;
        descs_rev[desc] = position;
    name_maps = {1: names_fwd, 2: names_rev, 'reg': names_fwd, 'rev': names_rev};
    desc_maps = {1: descs_fwd, 2: descs_rev, 'reg': descs_fwd, 'rev': descs_rev};
    return {1: name_maps, 2: desc_maps, 'name': name_maps, 'desc': desc_maps};
def arglistize(proexec, *varlist):
    """Flatten (option, value) pairs into an argv list headed by proexec.

    None entries are skipped, so (opt, None) yields a bare flag and
    (None, val) yields a bare value.
    """
    newarglist = [proexec];
    for pair in varlist:
        if pair[0] is not None:
            newarglist.append(pair[0]);
        if pair[1] is not None:
            newarglist.append(pair[1]);
    return newarglist;
def fix_header_names(header_dict):
    """Return header_dict with every key normalized to Header-Case via str.title()."""
    if(sys.version[0]=="2"):
        header_dict = {name.title(): value for name, value in header_dict.iteritems()};
    if(sys.version[0]>="3"):
        header_dict = {name.title(): value for name, value in header_dict.items()};
    return header_dict;
# hms_string by ArcGIS Python Recipes
# https://arcpy.wordpress.com/2012/04/20/146/
def hms_string(sec_elapsed):
    """Format elapsed seconds as "H:MM:SS.ss" (hours are not zero-padded)."""
    whole_hours = int(sec_elapsed / (60 * 60));
    whole_minutes = int((sec_elapsed % (60 * 60)) / 60);
    leftover_seconds = sec_elapsed % 60.0;
    return "{}:{:>02}:{:>05.2f}".format(whole_hours, whole_minutes, leftover_seconds);
# get_readable_size by Lipis
# http://stackoverflow.com/posts/14998888/revisions
def get_readable_size(bytes, precision=1, unit="IEC"):
    """Convert a byte count into a human-readable size dict.

    unit is "IEC" (1024-based, KiB/MiB/...) or "SI" (1000-based, kB/MB/...);
    anything else falls back to "IEC".  Returns {'Bytes', 'ReadableWithSuffix',
    'ReadableWithoutSuffix', 'ReadableSuffix'}.
    """
    unit = unit.upper();
    if(unit!="IEC" and unit!="SI"):
        unit = "IEC";
    if(unit=="IEC"):
        suffixes = [" B"," KiB"," MiB"," GiB"," TiB"," PiB"," EiB"," ZiB"];
        step = 1024.0;
    else:
        suffixes = [" B"," kB"," MB"," GB"," TB"," PB"," EB"," ZB"];
        step = 1000.0;
    orgbytes = bytes;
    size = bytes;
    for suffix in suffixes:
        if abs(size) < step:
            text = ("%3."+str(precision)+"f%s") % (size, suffix);
            # Strip trailing zero digits and a dangling decimal point before
            # the suffix (e.g. "1.0 KiB" -> "1 KiB").
            text = re.sub(r"([0]+) ([A-Za-z]+)", r" \2", text);
            text = re.sub(r"\. ([A-Za-z]+)", r" \1", text);
            pieces = text.split();
            return {'Bytes': orgbytes, 'ReadableWithSuffix': text, 'ReadableWithoutSuffix': pieces[0], 'ReadableSuffix': pieces[1]};
        size /= step;
    # Fell off the table: report in the largest (yobi) unit.
    text = ("%."+str(precision)+"f%s") % (size, "YiB");
    text = re.sub(r"([0]+) ([A-Za-z]+)", r" \2", text);
    text = re.sub(r"\. ([A-Za-z]+)", r" \1", text);
    pieces = text.split();
    return {'Bytes': orgbytes, 'ReadableWithSuffix': text, 'ReadableWithoutSuffix': pieces[0], 'ReadableSuffix': pieces[1]};
def get_readable_size_from_file(infile, precision=1, unit="IEC", usehashes=False, usehashtypes="md5,sha1"):
    """Return get_readable_size() info for a file path.

    When usehashes is True, also read the file once and add one
    uppercase-keyed hex digest per name in the comma-separated
    usehashtypes (names are passed to hashlib.new).
    """
    unit = unit.upper();
    return_val = get_readable_size(os.path.getsize(infile), precision, unit);
    if(usehashes):
        # BUGFIX: open the file with a context manager so the handle is
        # closed even if read() raises (the original open/read/close leaked
        # the descriptor on error).
        with open(infile, "rb") as openfile:
            filecontents = openfile.read();
        for hashtype in usehashtypes.lower().split(","):
            hashname = hashtype.strip().upper();
            filehash = hashlib.new(hashname);
            filehash.update(filecontents);
            return_val.update({hashname: filehash.hexdigest()});
    return return_val;
def get_readable_size_from_string(instring, precision=1, unit="IEC", usehashes=False, usehashtypes="md5,sha1"):
    """Return get_readable_size() info for len(instring).

    When usehashes is True, add one uppercase-keyed hex digest of instring
    per name in the comma-separated usehashtypes (UTF-8 encoded on py3).
    """
    unit = unit.upper();
    return_val = get_readable_size(len(instring), precision, unit);
    if(usehashes):
        for hashtype in usehashtypes.lower().split(","):
            hashname = hashtype.strip().upper();
            digester = hashlib.new(hashname);
            if(sys.version[0]=="2"):
                digester.update(instring);
            if(sys.version[0]>="3"):
                digester.update(instring.encode('utf-8'));
            return_val.update({hashname: digester.hexdigest()});
    return return_val;
def http_status_to_reason(code):
    """Map an HTTP status code to its standard reason phrase.

    Unknown codes yield 'Unknown Status Code'.
    """
    reason_table = {
        100: 'Continue',
        101: 'Switching Protocols',
        102: 'Processing',
        200: 'OK',
        201: 'Created',
        202: 'Accepted',
        203: 'Non-Authoritative Information',
        204: 'No Content',
        205: 'Reset Content',
        206: 'Partial Content',
        207: 'Multi-Status',
        208: 'Already Reported',
        226: 'IM Used',
        300: 'Multiple Choices',
        301: 'Moved Permanently',
        302: 'Found',
        303: 'See Other',
        304: 'Not Modified',
        305: 'Use Proxy',
        307: 'Temporary Redirect',
        308: 'Permanent Redirect',
        400: 'Bad Request',
        401: 'Unauthorized',
        402: 'Payment Required',
        403: 'Forbidden',
        404: 'Not Found',
        405: 'Method Not Allowed',
        406: 'Not Acceptable',
        407: 'Proxy Authentication Required',
        408: 'Request Timeout',
        409: 'Conflict',
        410: 'Gone',
        411: 'Length Required',
        412: 'Precondition Failed',
        413: 'Payload Too Large',
        414: 'URI Too Long',
        415: 'Unsupported Media Type',
        416: 'Range Not Satisfiable',
        417: 'Expectation Failed',
        421: 'Misdirected Request',
        422: 'Unprocessable Entity',
        423: 'Locked',
        424: 'Failed Dependency',
        426: 'Upgrade Required',
        428: 'Precondition Required',
        429: 'Too Many Requests',
        431: 'Request Header Fields Too Large',
        451: 'Unavailable For Legal Reasons',
        500: 'Internal Server Error',
        501: 'Not Implemented',
        502: 'Bad Gateway',
        503: 'Service Unavailable',
        504: 'Gateway Timeout',
        505: 'HTTP Version Not Supported',
        506: 'Variant Also Negotiates',
        507: 'Insufficient Storage',
        508: 'Loop Detected',
        510: 'Not Extended',
        511: 'Network Authentication Required'
    };
    return reason_table.get(code, 'Unknown Status Code');
def ftp_status_to_reason(code):
    """Map an FTP reply code (RFC 959) to its descriptive text.

    Unknown codes yield 'Unknown Status Code'.
    """
    reason_table = {
        110: 'Restart marker reply',
        120: 'Service ready in nnn minutes',
        125: 'Data connection already open; transfer starting',
        150: 'File status okay; about to open data connection',
        200: 'Command okay',
        202: 'Command not implemented, superfluous at this site',
        211: 'System status, or system help reply',
        212: 'Directory status',
        213: 'File status',
        214: 'Help message',
        215: 'NAME system type',
        220: 'Service ready for new user',
        221: 'Service closing control connection',
        225: 'Data connection open; no transfer in progress',
        226: 'Closing data connection',
        227: 'Entering Passive Mode',
        230: 'User logged in, proceed',
        250: 'Requested file action okay, completed',
        257: '"PATHNAME" created',
        331: 'User name okay, need password',
        332: 'Need account for login',
        350: 'Requested file action pending further information',
        421: 'Service not available, closing control connection',
        425: 'Can\'t open data connection',
        426: 'Connection closed; transfer aborted',
        450: 'Requested file action not taken',
        451: 'Requested action aborted. Local error in processing',
        452: 'Requested action not taken. Insufficient storage space in system',
        500: 'Syntax error, command unrecognized',
        501: 'Syntax error in parameters or arguments',
        502: 'Command not implemented',
        503: 'Bad sequence of commands',
        504: 'Command not implemented for that parameter',
        530: 'Not logged in',
        532: 'Need account for storing files',
        550: 'Requested action not taken. File unavailable',
        551: 'Requested action aborted. Page type unknown',
        552: 'Requested file action aborted. Exceeded storage allocation',
        553: 'Requested action not taken. File name not allowed'
    };
    return reason_table.get(code, 'Unknown Status Code');
def sftp_status_to_reason(code):
    """Map an SFTP status code to its SSH_FX_* symbolic name.

    Unknown codes yield 'Unknown Status Code'.
    """
    reason_table = {
        0: 'SSH_FX_OK',
        1: 'SSH_FX_EOF',
        2: 'SSH_FX_NO_SUCH_FILE',
        3: 'SSH_FX_PERMISSION_DENIED',
        4: 'SSH_FX_FAILURE',
        5: 'SSH_FX_BAD_MESSAGE',
        6: 'SSH_FX_NO_CONNECTION',
        7: 'SSH_FX_CONNECTION_LOST',
        8: 'SSH_FX_OP_UNSUPPORTED'
    };
    return reason_table.get(code, 'Unknown Status Code');
def make_http_headers_from_dict_to_list(headers={'Referer': "http://google.com/", 'User-Agent': geturls_ua, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"}):
    """Convert a header dict into a list of (name, value) tuples.

    Lists are passed through unchanged; any other type yields False.
    """
    if isinstance(headers, dict):
        if(sys.version[0]=="2"):
            return [(name, value) for name, value in headers.iteritems()];
        return [(name, value) for name, value in headers.items()];
    elif isinstance(headers, list):
        return headers;
    return False;
def make_http_headers_from_dict_to_pycurl(headers={'Referer': "http://google.com/", 'User-Agent': geturls_ua, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"}):
    """Convert a header dict into pycurl-style "Name: value" strings.

    Lists are passed through unchanged; any other type yields False.
    """
    if isinstance(headers, dict):
        if(sys.version[0]=="2"):
            return [name+": "+value for name, value in headers.iteritems()];
        return [name+": "+value for name, value in headers.items()];
    elif isinstance(headers, list):
        return headers;
    return False;
def make_http_headers_from_pycurl_to_dict(headers):
    """Parse raw pycurl header text into a dict with title-cased names.

    Lines without a ": " separator (e.g. the status line) are skipped.
    """
    result = {};
    for line in headers.strip().split('\r\n'):
        name, sep, value = line.partition(': ');
        if(sep):
            result[name.title()] = value;
    return result;
def make_http_headers_from_list_to_dict(headers=[("Referer", "http://google.com/"), ("User-Agent", geturls_ua), ("Accept-Encoding", compression_supported), ("Accept-Language", "en-US,en;q=0.8,en-CA,en-GB;q=0.6"), ("Accept-Charset", "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7"), ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"), ("Connection", "close")]):
    """Convert a list of (name, value) pairs into a header dict.

    Dicts are passed through unchanged; any other type yields False.
    """
    if isinstance(headers, list):
        return {pair[0]: pair[1] for pair in headers};
    elif isinstance(headers, dict):
        return headers;
    return False;
def get_httplib_support(checkvalue=None):
    """List the usable backend names, or test one.

    With checkvalue=None, return the list of backend identifiers whose
    libraries imported successfully.  Otherwise normalize the common
    aliases ("urllib1"/"urllib2" -> "urllib", "httplib1" -> "httplib") and
    return True/False for membership.
    """
    global haverequests, havemechanize, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp;
    supported = ["ftp", "httplib"];
    if(havehttplib2):
        supported.append("httplib2");
    supported.append("urllib");
    if(haveurllib3):
        supported.append("urllib3");
        supported.append("request3");
    supported.append("request");
    if(haverequests):
        supported.append("requests");
    if(havehttpx):
        supported.append("httpx");
        supported.append("httpx2");
    if(havemechanize):
        supported.append("mechanize");
    if(havepycurl):
        supported.append("pycurl");
        supported.append("pycurl2");
        supported.append("pycurl3");
    if(haveparamiko):
        supported.append("sftp");
    if(havepysftp):
        supported.append("pysftp");
    if(checkvalue is not None):
        if(checkvalue=="urllib1" or checkvalue=="urllib2"):
            checkvalue = "urllib";
        if(checkvalue=="httplib1"):
            checkvalue = "httplib";
        return checkvalue in supported;
    return supported;
def check_httplib_support(checkvalue="urllib"):
    """Return True when the named backend (aliases allowed) is usable."""
    if(checkvalue=="urllib1" or checkvalue=="urllib2"):
        checkvalue = "urllib";
    if(checkvalue=="httplib1"):
        checkvalue = "httplib";
    return get_httplib_support(checkvalue);
def get_httplib_support_list():
    """Return the full list of usable backend names."""
    return get_httplib_support(None);
630 def download_from_url(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", buffersize=524288, sleep=-1, timeout=10):
631 global geturls_download_sleep, haverequests, havemechanize, havepycurl, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp;
632 if(sleep<0):
633 sleep = geturls_download_sleep;
634 if(timeout<=0):
635 timeout = 10;
636 if(httplibuse=="urllib1" or httplibuse=="urllib2" or httplibuse=="request"):
637 httplibuse = "urllib";
638 if(httplibuse=="httplib1"):
639 httplibuse = "httplib";
640 if(not haverequests and httplibuse=="requests"):
641 httplibuse = "urllib";
642 if(not havehttpx and httplibuse=="httpx"):
643 httplibuse = "urllib";
644 if(not havehttpx and httplibuse=="httpx2"):
645 httplibuse = "urllib";
646 if(not havehttpcore and httplibuse=="httpcore"):
647 httplibuse = "urllib";
648 if(not havehttpcore and httplibuse=="httpcore2"):
649 httplibuse = "urllib";
650 if(not havemechanize and httplibuse=="mechanize"):
651 httplibuse = "urllib";
652 if(not havepycurl and httplibuse=="pycurl"):
653 httplibuse = "urllib";
654 if(not havepycurl and httplibuse=="pycurl2"):
655 httplibuse = "urllib";
656 if(not havepycurl and httplibuse=="pycurl3"):
657 httplibuse = "urllib";
658 if(not havehttplib2 and httplibuse=="httplib2"):
659 httplibuse = "httplib";
660 if(not haveparamiko and httplibuse=="sftp"):
661 httplibuse = "ftp";
662 if(not havepysftp and httplibuse=="pysftp"):
663 httplibuse = "ftp";
664 urlparts = urlparse.urlparse(httpurl);
665 if(isinstance(httpheaders, list)):
666 httpheaders = make_http_headers_from_list_to_dict(httpheaders);
667 httpheaders = fix_header_names(httpheaders);
668 if(httpuseragent is not None):
669 if('User-Agent' in httpheaders):
670 httpheaders['User-Agent'] = httpuseragent;
671 else:
672 httpuseragent.update({'User-Agent': httpuseragent});
673 if(httpreferer is not None):
674 if('Referer' in httpheaders):
675 httpheaders['Referer'] = httpreferer;
676 else:
677 httpuseragent.update({'Referer': httpreferer});
678 if(urlparts.username is not None or urlparts.password is not None):
679 if(sys.version[0]=="2"):
680 inurlencode = b64encode(str(urlparts.username+":"+urlparts.password));
681 if(sys.version[0]>="3"):
682 inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
683 httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
684 geturls_opener = build_opener(HTTPCookieProcessor(httpcookie));
685 if(httplibuse=="urllib" or httplibuse=="mechanize"):
686 if(isinstance(httpheaders, dict)):
687 httpheaders = make_http_headers_from_dict_to_list(httpheaders);
688 if(httplibuse=="pycurl" or httplibuse=="pycurl2" or httplibuse=="pycurl3"):
689 if(isinstance(httpheaders, dict)):
690 httpheaders = make_http_headers_from_dict_to_pycurl(httpheaders);
691 geturls_opener.addheaders = httpheaders;
692 time.sleep(sleep);
693 if(postdata is not None and not isinstance(postdata, dict)):
694 postdata = urlencode(postdata);
695 if(httplibuse=="urllib" or httplibuse=="request"):
696 geturls_request = Request(httpurl);
697 try:
698 if(httpmethod=="GET"):
699 geturls_text = geturls_opener.open(geturls_request);
700 elif(httpmethod=="POST"):
701 geturls_text = geturls_opener.open(geturls_request, data=postdata);
702 else:
703 geturls_text = geturls_opener.open(geturls_request);
704 except HTTPError as geturls_text_error:
705 geturls_text = geturls_text_error;
706 log.info("Error With URL "+httpurl);
707 except URLError:
708 log.info("Error With URL "+httpurl);
709 return False;
710 except socket.timeout:
711 log.info("Error With URL "+httpurl);
712 return False;
713 httpcodeout = geturls_text.getcode();
714 try:
715 httpcodereason = geturls_text.reason;
716 except AttributeError:
717 httpcodereason = http_status_to_reason(geturls_text.getcode());
718 try:
719 httpversionout = geturls_text.version;
720 except AttributeError:
721 httpversionout = "1.1";
722 httpmethodout = geturls_request.get_method();
723 httpurlout = geturls_text.geturl();
724 httpheaderout = geturls_text.info();
725 httpheadersentout = httpheaders;
726 elif(httplibuse=="httplib"):
727 if(urlparts[0]=="http"):
728 httpconn = HTTPConnection(urlparts[1], timeout=timeout);
729 elif(urlparts[0]=="https"):
730 httpconn = HTTPSConnection(urlparts[1], timeout=timeout);
731 else:
732 return False;
733 if(postdata is not None and not isinstance(postdata, dict)):
734 postdata = urlencode(postdata);
735 try:
736 if(httpmethod=="GET"):
737 httpconn.request("GET", urlparts[2], headers=httpheaders);
738 elif(httpmethod=="POST"):
739 httpconn.request("GET", urlparts[2], body=postdata, headers=httpheaders);
740 else:
741 httpconn.request("GET", urlparts[2], headers=httpheaders);
742 except socket.timeout:
743 log.info("Error With URL "+httpurl);
744 return False;
745 except socket.gaierror:
746 log.info("Error With URL "+httpurl);
747 return False;
748 except BlockingIOError:
749 log.info("Error With URL "+httpurl);
750 return False;
751 geturls_text = httpconn.getresponse();
752 httpcodeout = geturls_text.status;
753 httpcodereason = geturls_text.reason;
754 if(geturls_text.version=="10"):
755 httpversionout = "1.0";
756 else:
757 httpversionout = "1.1";
758 httpmethodout = geturls_text._method;
759 httpurlout = httpurl;
760 httpheaderout = geturls_text.getheaders();
761 httpheadersentout = httpheaders;
762 elif(httplibuse=="httplib2"):
763 if(urlparts[0]=="http"):
764 httpconn = HTTPConnectionWithTimeout(urlparts[1], timeout=timeout);
765 elif(urlparts[0]=="https"):
766 httpconn = HTTPSConnectionWithTimeout(urlparts[1], timeout=timeout);
767 else:
768 return False;
769 if(postdata is not None and not isinstance(postdata, dict)):
770 postdata = urlencode(postdata);
771 try:
772 if(httpmethod=="GET"):
773 httpconn.request("GET", urlparts[2], headers=httpheaders);
774 elif(httpmethod=="POST"):
775 httpconn.request("GET", urlparts[2], body=postdata, headers=httpheaders);
776 else:
777 httpconn.request("GET", urlparts[2], headers=httpheaders);
778 except socket.timeout:
779 log.info("Error With URL "+httpurl);
780 return False;
781 except socket.gaierror:
782 log.info("Error With URL "+httpurl);
783 return False;
784 except BlockingIOError:
785 log.info("Error With URL "+httpurl);
786 return False;
787 geturls_text = httpconn.getresponse();
788 httpcodeout = geturls_text.status;
789 httpcodereason = geturls_text.reason;
790 if(geturls_text.version=="10"):
791 httpversionout = "1.0";
792 else:
793 httpversionout = "1.1";
794 httpmethodout = httpmethod;
795 httpurlout = httpurl;
796 httpheaderout = geturls_text.getheaders();
797 httpheadersentout = httpheaders;
798 elif(httplibuse=="urllib3" or httplibuse=="request3"):
799 timeout = urllib3.util.Timeout(connect=timeout, read=timeout);
800 urllib_pool = urllib3.PoolManager(headers=httpheaders, timeout=timeout);
801 try:
802 if(httpmethod=="GET"):
803 geturls_text = urllib_pool.request("GET", httpurl, headers=httpheaders, preload_content=False);
804 elif(httpmethod=="POST"):
805 geturls_text = urllib_pool.request("POST", httpurl, body=postdata, headers=httpheaders, preload_content=False);
806 else:
807 geturls_text = urllib_pool.request("GET", httpurl, headers=httpheaders, preload_content=False);
808 except urllib3.exceptions.ConnectTimeoutError:
809 log.info("Error With URL "+httpurl);
810 return False;
811 except urllib3.exceptions.ConnectError:
812 log.info("Error With URL "+httpurl);
813 return False;
814 except urllib3.exceptions.MaxRetryError:
815 log.info("Error With URL "+httpurl);
816 return False;
817 except socket.timeout:
818 log.info("Error With URL "+httpurl);
819 return False;
820 except ValueError:
821 log.info("Error With URL "+httpurl);
822 return False;
823 httpcodeout = geturls_text.status;
824 httpcodereason = geturls_text.reason;
825 if(geturls_text.version=="10"):
826 httpversionout = "1.0";
827 else:
828 httpversionout = "1.1";
829 httpmethodout = httpmethod;
830 httpurlout = geturls_text.geturl();
831 httpheaderout = geturls_text.info();
832 httpheadersentout = httpheaders;
833 elif(httplibuse=="requests"):
834 try:
835 reqsession = requests.Session();
836 if(httpmethod=="GET"):
837 geturls_text = reqsession.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
838 elif(httpmethod=="POST"):
839 geturls_text = reqsession.post(httpurl, timeout=timeout, data=postdata, headers=httpheaders, cookies=httpcookie);
840 else:
841 geturls_text = reqsession.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
842 except requests.exceptions.ConnectTimeout:
843 log.info("Error With URL "+httpurl);
844 return False;
845 except requests.exceptions.ConnectError:
846 log.info("Error With URL "+httpurl);
847 return False;
848 except socket.timeout:
849 log.info("Error With URL "+httpurl);
850 return False;
851 httpcodeout = geturls_text.status_code;
852 httpcodereason = geturls_text.reason;
853 if(geturls_text.raw.version=="10"):
854 httpversionout = "1.0";
855 else:
856 httpversionout = "1.1";
857 httpmethodout = httpmethod;
858 httpurlout = geturls_text.url;
859 httpheaderout = geturls_text.headers;
860 httpheadersentout = geturls_text.request.headers;
861 elif(httplibuse=="httpx"):
862 try:
863 if(httpmethod=="GET"):
864 httpx_pool = httpx.Client(http1=True, http2=False, trust_env=True);
865 geturls_text = httpx_pool.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
866 elif(httpmethod=="POST"):
867 httpx_pool = httpx.Client(http1=True, http2=False, trust_env=True);
868 geturls_text = httpx_pool.post(httpurl, timeout=timeout, data=postdata, headers=httpheaders, cookies=httpcookie);
869 else:
870 httpx_pool = httpx.Client(http1=True, http2=False, trust_env=True);
871 geturls_text = httpx_pool.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
872 except httpx.ConnectTimeout:
873 log.info("Error With URL "+httpurl);
874 return False;
875 except httpx.ConnectError:
876 log.info("Error With URL "+httpurl);
877 return False;
878 except socket.timeout:
879 log.info("Error With URL "+httpurl);
880 return False;
881 httpcodeout = geturls_text.status_code;
882 httpcodereason = geturls_text.reason_phrase;
883 httpversionout = geturls_text.http_version;
884 httpmethodout = httpmethod;
885 httpurlout = str(geturls_text.url);
886 httpheaderout = geturls_text.headers;
887 httpheadersentout = geturls_text.request.headers;
888 elif(httplibuse=="httpx2"):
889 try:
890 if(httpmethod=="GET"):
891 httpx_pool = httpx.Client(http1=True, http2=True, trust_env=True);
892 geturls_text = httpx_pool.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
893 elif(httpmethod=="POST"):
894 httpx_pool = httpx.Client(http1=True, http2=True, trust_env=True);
895 geturls_text = httpx_pool.post(httpurl, timeout=timeout, data=postdata, headers=httpheaders, cookies=httpcookie);
896 else:
897 httpx_pool = httpx.Client(http1=True, http2=True, trust_env=True);
898 geturls_text = httpx_pool.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
899 except httpx.ConnectTimeout:
900 log.info("Error With URL "+httpurl);
901 return False;
902 except httpx.ConnectError:
903 log.info("Error With URL "+httpurl);
904 return False;
905 except socket.timeout:
906 log.info("Error With URL "+httpurl);
907 return False;
908 httpcodeout = geturls_text.status_code;
909 httpcodereason = geturls_text.reason;
910 httpversionout = geturls_text.http_version;
911 httpmethodout = httpmethod;
912 httpurlout = str(geturls_text.url);
913 httpheaderout = geturls_text.headers;
914 httpheadersentout = geturls_text.request.headers;
915 elif(httplibuse=="httpcore"):
916 try:
917 if(httpmethod=="GET"):
918 httpx_pool = httpcore.ConnectionPool(http1=True, http2=False);
919 geturls_text = httpx_pool.request("GET", httpurl, headers=httpheaders);
920 elif(httpmethod=="POST"):
921 httpx_pool = httpcore.ConnectionPool(http1=True, http2=False);
922 geturls_text = httpx_pool.request("GET", httpurl, data=postdata, headers=httpheaders);
923 else:
924 httpx_pool = httpcore.ConnectionPool(http1=True, http2=False);
925 geturls_text = httpx_pool.request("GET", httpurl, headers=httpheaders);
926 except httpcore.ConnectTimeout:
927 log.info("Error With URL "+httpurl);
928 return False;
929 except httpcore.ConnectError:
930 log.info("Error With URL "+httpurl);
931 return False;
932 except socket.timeout:
933 log.info("Error With URL "+httpurl);
934 return False;
935 httpcodeout = geturls_text.status;
936 httpcodereason = http_status_to_reason(geturls_text.status);
937 httpversionout = "1.1";
938 httpmethodout = httpmethod;
939 httpurlout = str(httpurl);
940 httpheaderout = geturls_text.headers;
941 httpheadersentout = httpheaders;
942 elif(httplibuse=="httpcore2"):
943 try:
944 if(httpmethod=="GET"):
945 httpx_pool = httpcore.ConnectionPool(http1=True, http2=True);
946 geturls_text = httpx_pool.request("GET", httpurl, headers=httpheaders);
947 elif(httpmethod=="POST"):
948 httpx_pool = httpcore.ConnectionPool(http1=True, http2=True);
949 geturls_text = httpx_pool.request("GET", httpurl, data=postdata, headers=httpheaders);
950 else:
951 httpx_pool = httpcore.ConnectionPool(http1=True, http2=True);
952 geturls_text = httpx_pool.request("GET", httpurl, headers=httpheaders);
953 except httpcore.ConnectTimeout:
954 log.info("Error With URL "+httpurl);
955 return False;
956 except httpcore.ConnectError:
957 log.info("Error With URL "+httpurl);
958 return False;
959 except socket.timeout:
960 log.info("Error With URL "+httpurl);
961 return False;
962 httpcodeout = geturls_text.status;
963 httpcodereason = geturls_text.reason;
964 httpversionout = "1.1";
965 httpmethodout = httpmethod;
966 httpurlout = str(httpurl);
967 httpheaderout = geturls_text.headers;
968 httpheadersentout = httpheaders;
969 elif(httplibuse=="mechanize"):
970 geturls_opener = mechanize.Browser();
971 if(isinstance(httpheaders, dict)):
972 httpheaders = make_http_headers_from_dict_to_list(httpheaders);
973 time.sleep(sleep);
974 geturls_opener.addheaders = httpheaders;
975 geturls_opener.set_cookiejar(httpcookie);
976 geturls_opener.set_handle_robots(False);
977 if(postdata is not None and not isinstance(postdata, dict)):
978 postdata = urlencode(postdata);
979 try:
980 if(httpmethod=="GET"):
981 geturls_text = geturls_opener.open(httpurl);
982 elif(httpmethod=="POST"):
983 geturls_text = geturls_opener.open(httpurl, data=postdata);
984 else:
985 geturls_text = geturls_opener.open(httpurl);
986 except mechanize.HTTPError as geturls_text_error:
987 geturls_text = geturls_text_error;
988 log.info("Error With URL "+httpurl);
989 except URLError:
990 log.info("Error With URL "+httpurl);
991 return False;
992 except socket.timeout:
993 log.info("Error With URL "+httpurl);
994 return False;
995 httpcodeout = geturls_text.code;
996 httpcodereason = geturls_text.msg;
997 httpversionout = "1.1";
998 httpmethodout = httpmethod;
999 httpurlout = geturls_text.geturl();
1000 httpheaderout = geturls_text.info();
1001 reqhead = geturls_opener.request;
1002 httpheadersentout = reqhead.header_items();
1003 elif(httplibuse=="pycurl"):
1004 retrieved_body = BytesIO();
1005 retrieved_headers = BytesIO();
1006 try:
1007 if(httpmethod=="GET"):
1008 geturls_text = pycurl.Curl();
1009 geturls_text.setopt(geturls_text.URL, httpurl);
1010 geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_1_1);
1011 geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
1012 geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
1013 geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
1014 geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
1015 geturls_text.setopt(geturls_text.TIMEOUT, timeout);
1016 geturls_text.perform();
1017 elif(httpmethod=="POST"):
1018 geturls_text = pycurl.Curl();
1019 geturls_text.setopt(geturls_text.URL, httpurl);
1020 geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_1_1);
1021 geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
1022 geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
1023 geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
1024 geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
1025 geturls_text.setopt(geturls_text.TIMEOUT, timeout);
1026 geturls_text.setopt(geturls_text.POST, True);
1027 geturls_text.setopt(geturls_text.POSTFIELDS, postdata);
1028 geturls_text.perform();
1029 else:
1030 geturls_text = pycurl.Curl();
1031 geturls_text.setopt(geturls_text.URL, httpurl);
1032 geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_1_1);
1033 geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
1034 geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
1035 geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
1036 geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
1037 geturls_text.setopt(geturls_text.TIMEOUT, timeout);
1038 geturls_text.perform();
1039 retrieved_headers.seek(0);
1040 if(sys.version[0]=="2"):
1041 pycurlhead = retrieved_headers.read();
1042 if(sys.version[0]>="3"):
1043 pycurlhead = retrieved_headers.read().decode('UTF-8');
1044 pyhttpverinfo = re.findall(r'^HTTP/([0-9.]+) (\d+) ([A-Za-z\s]+)$', pycurlhead.splitlines()[0])[0];
1045 pycurlheadersout = make_http_headers_from_pycurl_to_dict(pycurlhead);
1046 retrieved_body.seek(0);
1047 except socket.timeout:
1048 log.info("Error With URL "+httpurl);
1049 return False;
1050 except socket.gaierror:
1051 log.info("Error With URL "+httpurl);
1052 return False;
1053 except ValueError:
1054 log.info("Error With URL "+httpurl);
1055 return False;
1056 httpcodeout = geturls_text.getinfo(geturls_text.HTTP_CODE);
1057 httpcodereason = http_status_to_reason(geturls_text.getinfo(geturls_text.HTTP_CODE));
1058 httpversionout = pyhttpverinfo[0];
1059 httpmethodout = httpmethod;
1060 httpurlout = geturls_text.getinfo(geturls_text.EFFECTIVE_URL);
1061 httpheaderout = pycurlheadersout;
1062 httpheadersentout = httpheaders;
1063 elif(httplibuse=="pycurl2"):
1064 retrieved_body = BytesIO();
1065 retrieved_headers = BytesIO();
1066 try:
1067 if(httpmethod=="GET"):
1068 geturls_text = pycurl.Curl();
1069 geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_2_0);
1070 geturls_text.setopt(geturls_text.URL, httpurl);
1071 geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
1072 geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
1073 geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
1074 geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
1075 geturls_text.setopt(geturls_text.TIMEOUT, timeout);
1076 geturls_text.perform();
1077 elif(httpmethod=="POST"):
1078 geturls_text = pycurl.Curl();
1079 geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_2_0);
1080 geturls_text.setopt(geturls_text.URL, httpurl);
1081 geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
1082 geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
1083 geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
1084 geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
1085 geturls_text.setopt(geturls_text.TIMEOUT, timeout);
1086 geturls_text.setopt(geturls_text.POST, True);
1087 geturls_text.setopt(geturls_text.POSTFIELDS, postdata);
1088 geturls_text.perform();
1089 else:
1090 geturls_text = pycurl.Curl();
1091 geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_2_0);
1092 geturls_text.setopt(geturls_text.URL, httpurl);
1093 geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
1094 geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
1095 geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
1096 geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
1097 geturls_text.setopt(geturls_text.TIMEOUT, timeout);
1098 geturls_text.perform();
1099 retrieved_headers.seek(0);
1100 if(sys.version[0]=="2"):
1101 pycurlhead = retrieved_headers.read();
1102 if(sys.version[0]>="3"):
1103 pycurlhead = retrieved_headers.read().decode('UTF-8');
1104 pyhttpverinfo = re.findall(r'^HTTP/([0-9.]+) (\d+) ([A-Za-z\s]+)$', pycurlhead.splitlines()[0])[0];
1105 pycurlheadersout = make_http_headers_from_pycurl_to_dict(pycurlhead);
1106 retrieved_body.seek(0);
1107 except socket.timeout:
1108 log.info("Error With URL "+httpurl);
1109 return False;
1110 except socket.gaierror:
1111 log.info("Error With URL "+httpurl);
1112 return False;
1113 except ValueError:
1114 log.info("Error With URL "+httpurl);
1115 return False;
1116 httpcodeout = geturls_text.getinfo(geturls_text.HTTP_CODE);
1117 httpcodereason = http_status_to_reason(geturls_text.getinfo(geturls_text.HTTP_CODE));
1118 httpversionout = pyhttpverinfo[0];
1119 httpmethodout = httpmethod;
1120 httpurlout = geturls_text.getinfo(geturls_text.EFFECTIVE_URL);
1121 httpheaderout = pycurlheadersout;
1122 httpheadersentout = httpheaders;
1123 elif(httplibuse=="pycurl3"):
1124 retrieved_body = BytesIO();
1125 retrieved_headers = BytesIO();
1126 try:
1127 if(httpmethod=="GET"):
1128 geturls_text = pycurl.Curl();
1129 geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_3_0);
1130 geturls_text.setopt(geturls_text.URL, httpurl);
1131 geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
1132 geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
1133 geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
1134 geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
1135 geturls_text.setopt(geturls_text.TIMEOUT, timeout);
1136 geturls_text.perform();
1137 elif(httpmethod=="POST"):
1138 geturls_text = pycurl.Curl();
1139 geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_3_0);
1140 geturls_text.setopt(geturls_text.URL, httpurl);
1141 geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
1142 geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
1143 geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
1144 geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
1145 geturls_text.setopt(geturls_text.TIMEOUT, timeout);
1146 geturls_text.setopt(geturls_text.POST, True);
1147 geturls_text.setopt(geturls_text.POSTFIELDS, postdata);
1148 geturls_text.perform();
1149 else:
1150 geturls_text = pycurl.Curl();
1151 geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_3_0);
1152 geturls_text.setopt(geturls_text.URL, httpurl);
1153 geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
1154 geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
1155 geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
1156 geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
1157 geturls_text.setopt(geturls_text.TIMEOUT, timeout);
1158 geturls_text.perform();
1159 retrieved_headers.seek(0);
1160 if(sys.version[0]=="2"):
1161 pycurlhead = retrieved_headers.read();
1162 if(sys.version[0]>="3"):
1163 pycurlhead = retrieved_headers.read().decode('UTF-8');
1164 pyhttpverinfo = re.findall(r'^HTTP/([0-9.]+) (\d+) ([A-Za-z\s]+)$', pycurlhead.splitlines()[0])[0];
1165 pycurlheadersout = make_http_headers_from_pycurl_to_dict(pycurlhead);
1166 retrieved_body.seek(0);
1167 except socket.timeout:
1168 log.info("Error With URL "+httpurl);
1169 return False;
1170 except socket.gaierror:
1171 log.info("Error With URL "+httpurl);
1172 return False;
1173 except ValueError:
1174 log.info("Error With URL "+httpurl);
1175 return False;
1176 httpcodeout = geturls_text.getinfo(geturls_text.HTTP_CODE);
1177 httpcodereason = http_status_to_reason(geturls_text.getinfo(geturls_text.HTTP_CODE));
1178 httpversionout = pyhttpverinfo[0];
1179 httpmethodout = httpmethod;
1180 httpurlout = geturls_text.getinfo(geturls_text.EFFECTIVE_URL);
1181 httpheaderout = pycurlheadersout;
1182 httpheadersentout = httpheaders;
1183 elif(httplibuse=="ftp"):
1184 geturls_text = download_file_from_ftp_file(httpurl);
1185 if(not geturls_text):
1186 return False;
1187 log.info("Downloading URL "+httpurl);
1188 returnval_content = geturls_text.read()[:];
1189 returnval = {'Type': "Content", 'Content': returnval_content, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None};
1190 geturls_text.close();
1191 elif(httplibuse=="sftp"):
1192 geturls_text = download_file_from_sftp_file(httpurl);
1193 if(not geturls_text):
1194 return False;
1195 log.info("Downloading URL "+httpurl);
1196 returnval_content = geturls_text.read()[:];
1197 returnval = {'Type': "Content", 'Content': returnval_content, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None};
1198 geturls_text.close();
1199 return returnval;
1200 elif(httplibuse=="pysftp"):
1201 geturls_text = download_file_from_pysftp_file(httpurl);
1202 if(not geturls_text):
1203 return False;
1204 log.info("Downloading URL "+httpurl);
1205 returnval_content = geturls_text.read()[:];
1206 returnval = {'Type': "Content", 'Content': returnval_content, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None};
1207 geturls_text.close();
1208 return returnval;
1209 else:
1210 returnval = False;
1211 if(isinstance(httpheaderout, list) and (httplibuse!="pycurl" and httplibuse!="pycurl2" and httplibuse!="pycurl3")):
1212 httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
1213 if(isinstance(httpheaderout, list) and (httplibuse=="pycurl" or httplibuse=="pycurl2" or httplibuse=="pycurl3")):
1214 httpheaderout = dict(make_http_headers_from_pycurl_to_dict("\r\n".join(httpheaderout)));
1215 if(sys.version[0]=="2"):
1216 try:
1217 prehttpheaderout = httpheaderout;
1218 httpheaderkeys = httpheaderout.keys();
1219 imax = len(httpheaderkeys);
1220 ic = 0;
1221 httpheaderout = {};
1222 while(ic < imax):
1223 httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
1224 ic += 1;
1225 except AttributeError:
1226 pass;
1227 httpheaderout = fix_header_names(httpheaderout);
1228 if(isinstance(httpheadersentout, list) and (httplibuse!="pycurl" and httplibuse!="pycurl2" and httplibuse!="pycurl3")):
1229 httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
1230 if(isinstance(httpheadersentout, list) and (httplibuse=="pycurl" or httplibuse=="pycurl2" or httplibuse=="pycurl3")):
1231 httpheadersentout = dict(make_http_headers_from_pycurl_to_dict("\r\n".join(httpheadersentout)));
1232 httpheadersentout = fix_header_names(httpheadersentout);
1233 log.info("Downloading URL "+httpurl);
1234 if(httplibuse=="urllib" or httplibuse=="request" or httplibuse=="request3" or httplibuse=="httplib" or httplibuse=="httplib2" or httplibuse=="urllib3" or httplibuse=="mechanize" or httplibuse=="httpx" or httplibuse=="httpx2" or httplibuse=="httpcore" or httplibuse=="httpcore2"):
1235 if(httpheaderout.get("Content-Encoding")=="gzip" or httpheaderout.get("Content-Encoding")=="deflate"):
1236 downloadsize = httpheaderout.get('Content-Length');
1237 if(downloadsize is not None):
1238 downloadsize = int(downloadsize);
1239 if downloadsize is None: downloadsize = 0;
1240 fulldatasize = 0;
1241 prevdownsize = 0;
1242 log.info("Downloading URL "+httpurl);
1243 with BytesIO() as strbuf:
1244 while True:
1245 databytes = geturls_text.read(buffersize);
1246 if not databytes: break;
1247 datasize = len(databytes);
1248 fulldatasize = datasize + fulldatasize;
1249 percentage = "";
1250 if(downloadsize>0):
1251 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
1252 downloaddiff = fulldatasize - prevdownsize;
1253 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
1254 prevdownsize = fulldatasize;
1255 strbuf.write(databytes);
1256 strbuf.seek(0);
1257 gzstrbuf = gzip.GzipFile(fileobj=strbuf);
1258 returnval_content = gzstrbuf.read()[:];
1259 if(httpheaderout.get("Content-Encoding")!="gzip" and httpheaderout.get("Content-Encoding")!="deflate" and httpheaderout.get("Content-Encoding")!="br"):
1260 downloadsize = httpheaderout.get('Content-Length');
1261 if(downloadsize is not None):
1262 downloadsize = int(downloadsize);
1263 if downloadsize is None: downloadsize = 0;
1264 fulldatasize = 0;
1265 prevdownsize = 0;
1266 log.info("Downloading URL "+httpurl);
1267 with BytesIO() as strbuf:
1268 while True:
1269 databytes = geturls_text.read(buffersize);
1270 if not databytes: break;
1271 datasize = len(databytes);
1272 fulldatasize = datasize + fulldatasize;
1273 percentage = "";
1274 if(downloadsize>0):
1275 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
1276 downloaddiff = fulldatasize - prevdownsize;
1277 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
1278 prevdownsize = fulldatasize;
1279 strbuf.write(databytes);
1280 strbuf.seek(0);
1281 returnval_content = strbuf.read();
1282 if(httpheaderout.get("Content-Encoding")=="br" and havebrotli):
1283 downloadsize = httpheaderout.get('Content-Length');
1284 if(downloadsize is not None):
1285 downloadsize = int(downloadsize);
1286 if downloadsize is None: downloadsize = 0;
1287 fulldatasize = 0;
1288 prevdownsize = 0;
1289 log.info("Downloading URL "+httpurl);
1290 with BytesIO() as strbuf:
1291 while True:
1292 databytes = geturls_text.read(buffersize);
1293 if not databytes: break;
1294 datasize = len(databytes);
1295 fulldatasize = datasize + fulldatasize;
1296 percentage = "";
1297 if(downloadsize>0):
1298 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
1299 downloaddiff = fulldatasize - prevdownsize;
1300 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
1301 prevdownsize = fulldatasize;
1302 strbuf.write(databytes);
1303 strbuf.seek(0);
1304 returnval_content = strbuf.read();
1305 returnval_content = brotli.decompress(returnval_content);
1306 geturls_text.close();
1307 elif(httplibuse=="requests"):
1308 log.info("Downloading URL "+httpurl);
1309 if(httpheaderout.get("Content-Encoding")=="gzip" or httpheaderout.get("Content-Encoding")=="deflate"):
1310 downloadsize = httpheaderout.get('Content-Length');
1311 if(downloadsize is not None):
1312 downloadsize = int(downloadsize);
1313 if downloadsize is None: downloadsize = 0;
1314 fulldatasize = 0;
1315 prevdownsize = 0;
1316 log.info("Downloading URL "+httpurl);
1317 with BytesIO() as strbuf:
1318 while True:
1319 databytes = geturls_text.raw.read(buffersize);
1320 if not databytes: break;
1321 datasize = len(databytes);
1322 fulldatasize = datasize + fulldatasize;
1323 percentage = "";
1324 if(downloadsize>0):
1325 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
1326 downloaddiff = fulldatasize - prevdownsize;
1327 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
1328 prevdownsize = fulldatasize;
1329 strbuf.write(databytes);
1330 strbuf.seek(0);
1331 gzstrbuf = gzip.GzipFile(fileobj=strbuf);
1332 returnval_content = gzstrbuf.read()[:];
1333 if(httpheaderout.get("Content-Encoding")!="gzip" and httpheaderout.get("Content-Encoding")!="deflate" and httpheaderout.get("Content-Encoding")!="br"):
1334 downloadsize = httpheaderout.get('Content-Length');
1335 if(downloadsize is not None):
1336 downloadsize = int(downloadsize);
1337 if downloadsize is None: downloadsize = 0;
1338 fulldatasize = 0;
1339 prevdownsize = 0;
1340 log.info("Downloading URL "+httpurl);
1341 with BytesIO() as strbuf:
1342 while True:
1343 databytes = geturls_text.raw.read(buffersize);
1344 if not databytes: break;
1345 datasize = len(databytes);
1346 fulldatasize = datasize + fulldatasize;
1347 percentage = "";
1348 if(downloadsize>0):
1349 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
1350 downloaddiff = fulldatasize - prevdownsize;
1351 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
1352 prevdownsize = fulldatasize;
1353 strbuf.write(databytes);
1354 strbuf.seek(0);
1355 returnval_content = strbuf.read();
1356 if(httpheaderout.get("Content-Encoding")=="br" and havebrotli):
1357 downloadsize = httpheaderout.get('Content-Length');
1358 if(downloadsize is not None):
1359 downloadsize = int(downloadsize);
1360 if downloadsize is None: downloadsize = 0;
1361 fulldatasize = 0;
1362 prevdownsize = 0;
1363 log.info("Downloading URL "+httpurl);
1364 with BytesIO() as strbuf:
1365 while True:
1366 databytes = geturls_text.raw.read(buffersize);
1367 if not databytes: break;
1368 datasize = len(databytes);
1369 fulldatasize = datasize + fulldatasize;
1370 percentage = "";
1371 if(downloadsize>0):
1372 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
1373 downloaddiff = fulldatasize - prevdownsize;
1374 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
1375 prevdownsize = fulldatasize;
1376 strbuf.write(databytes);
1377 strbuf.seek(0);
1378 returnval_content = strbuf.read();
1379 returnval_content = brotli.decompress(returnval_content);
1380 geturls_text.close();
1381 elif(httplibuse=="pycurl" or httplibuse=="pycurl2" or httplibuse=="pycurl3"):
1382 log.info("Downloading URL "+httpurl);
1383 if(httpheaderout.get("Content-Encoding")=="gzip" or httpheaderout.get("Content-Encoding")=="deflate"):
1384 downloadsize = httpheaderout.get('Content-Length');
1385 if(downloadsize is not None):
1386 downloadsize = int(downloadsize);
1387 if downloadsize is None: downloadsize = 0;
1388 fulldatasize = 0;
1389 prevdownsize = 0;
1390 log.info("Downloading URL "+httpurl);
1391 with BytesIO() as strbuf:
1392 while True:
1393 databytes = retrieved_body.read(buffersize);
1394 if not databytes: break;
1395 datasize = len(databytes);
1396 fulldatasize = datasize + fulldatasize;
1397 percentage = "";
1398 if(downloadsize>0):
1399 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
1400 downloaddiff = fulldatasize - prevdownsize;
1401 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
1402 prevdownsize = fulldatasize;
1403 strbuf.write(databytes);
1404 strbuf.seek(0);
1405 gzstrbuf = gzip.GzipFile(fileobj=strbuf);
1406 returnval_content = gzstrbuf.read()[:];
1407 if(httpheaderout.get("Content-Encoding")!="gzip" and httpheaderout.get("Content-Encoding")!="deflate" and httpheaderout.get("Content-Encoding")!="br"):
1408 downloadsize = httpheaderout.get('Content-Length');
1409 if(downloadsize is not None):
1410 downloadsize = int(downloadsize);
1411 if downloadsize is None: downloadsize = 0;
1412 fulldatasize = 0;
1413 prevdownsize = 0;
1414 log.info("Downloading URL "+httpurl);
1415 with BytesIO() as strbuf:
1416 while True:
1417 databytes = retrieved_body.read(buffersize);
1418 if not databytes: break;
1419 datasize = len(databytes);
1420 fulldatasize = datasize + fulldatasize;
1421 percentage = "";
1422 if(downloadsize>0):
1423 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
1424 downloaddiff = fulldatasize - prevdownsize;
1425 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
1426 prevdownsize = fulldatasize;
1427 strbuf.write(databytes);
1428 strbuf.seek(0);
1429 returnval_content = strbuf.read();
1430 if(httpheaderout.get("Content-Encoding")=="br" and havebrotli):
1431 downloadsize = httpheaderout.get('Content-Length');
1432 if(downloadsize is not None):
1433 downloadsize = int(downloadsize);
1434 if downloadsize is None: downloadsize = 0;
1435 fulldatasize = 0;
1436 prevdownsize = 0;
1437 log.info("Downloading URL "+httpurl);
1438 with BytesIO() as strbuf:
1439 while True:
1440 databytes = retrieved_body.read(buffersize);
1441 if not databytes: break;
1442 datasize = len(databytes);
1443 fulldatasize = datasize + fulldatasize;
1444 percentage = "";
1445 if(downloadsize>0):
1446 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
1447 downloaddiff = fulldatasize - prevdownsize;
1448 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
1449 prevdownsize = fulldatasize;
1450 strbuf.write(databytes);
1451 strbuf.seek(0);
1452 returnval_content = strbuf.read();
1453 returnval_content = brotli.decompress(returnval_content);
1454 geturls_text.close();
1455 elif(httplibuse=="ftp" or httplibuse=="sftp" or httplibuse=="pysftp"):
1456 pass;
1457 else:
1458 returnval = False;
1459 returnval = {'Type': "Content", 'Content': returnval_content, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason};
1460 return returnval;
1462 def download_from_url_file(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
1463 global geturls_download_sleep, tmpfileprefix, tmpfilesuffix, haverequests, havemechanize, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp;
1464 exec_time_start = time.time();
1465 myhash = hashlib.new("sha1");
1466 if(sys.version[0]=="2"):
1467 myhash.update(httpurl);
1468 myhash.update(str(buffersize));
1469 myhash.update(str(exec_time_start));
1470 if(sys.version[0]>="3"):
1471 myhash.update(httpurl.encode('utf-8'));
1472 myhash.update(str(buffersize).encode('utf-8'));
1473 myhash.update(str(exec_time_start).encode('utf-8'));
1474 newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest());
1475 if(sleep<0):
1476 sleep = geturls_download_sleep;
1477 if(timeout<=0):
1478 timeout = 10;
1479 if(httplibuse=="urllib1" or httplibuse=="urllib2" or httplibuse=="request"):
1480 httplibuse = "urllib";
1481 if(httplibuse=="httplib1"):
1482 httplibuse = "httplib";
1483 if(not haverequests and httplibuse=="requests"):
1484 httplibuse = "urllib";
1485 if(not havehttpx and httplibuse=="httpx"):
1486 httplibuse = "urllib";
1487 if(not havehttpx and httplibuse=="httpx2"):
1488 httplibuse = "urllib";
1489 if(not havehttpcore and httplibuse=="httpcore"):
1490 httplibuse = "urllib";
1491 if(not havehttpcore and httplibuse=="httpcore2"):
1492 httplibuse = "urllib";
1493 if(not havemechanize and httplibuse=="mechanize"):
1494 httplibuse = "urllib";
1495 if(not havepycurl and httplibuse=="pycurl"):
1496 httplibuse = "urllib";
1497 if(not havepycurl and httplibuse=="pycurl2"):
1498 httplibuse = "urllib";
1499 if(not havepycurl and httplibuse=="pycurl3"):
1500 httplibuse = "urllib";
1501 if(not havehttplib2 and httplibuse=="httplib2"):
1502 httplibuse = "httplib";
1503 if(not haveparamiko and httplibuse=="sftp"):
1504 httplibuse = "ftp";
1505 if(not haveparamiko and httplibuse=="pysftp"):
1506 httplibuse = "ftp";
1507 urlparts = urlparse.urlparse(httpurl);
1508 if(isinstance(httpheaders, list)):
1509 httpheaders = make_http_headers_from_list_to_dict(httpheaders);
1510 httpheaders = fix_header_names(httpheaders);
1511 if(ranges[0] is not None):
1512 range_str = "bytes="+str(range[0])+"-";
1513 if(ranges[1] is not None and ranges[1]>ranges[0]):
1514 range_str += str(range[1]);
1515 if('Range' in httpheaders):
1516 httpheaders['Range'] = range_str;
1517 else:
1518 httpuseragent.update({'Range': range_str});
1519 if(httpuseragent is not None):
1520 if('User-Agent' in httpheaders):
1521 httpheaders['User-Agent'] = httpuseragent;
1522 else:
1523 httpuseragent.update({'User-Agent': httpuseragent});
1524 if(httpreferer is not None):
1525 if('Referer' in httpheaders):
1526 httpheaders['Referer'] = httpreferer;
1527 else:
1528 httpuseragent.update({'Referer': httpreferer});
1529 if(urlparts.username is not None or urlparts.password is not None):
1530 if(sys.version[0]=="2"):
1531 inurlencode = b64encode(str(urlparts.username+":"+urlparts.password));
1532 if(sys.version[0]>="3"):
1533 inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
1534 httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
1535 geturls_opener = build_opener(HTTPCookieProcessor(httpcookie));
1536 if(httplibuse=="urllib" or httplibuse=="mechanize"):
1537 if(isinstance(httpheaders, dict)):
1538 httpheaders = make_http_headers_from_dict_to_list(httpheaders);
1539 if(httplibuse=="pycurl" or httplibuse=="pycurl2" or httplibuse=="pycurl3"):
1540 if(isinstance(httpheaders, dict)):
1541 httpheaders = make_http_headers_from_dict_to_pycurl(httpheaders);
1542 geturls_opener.addheaders = httpheaders;
1543 time.sleep(sleep);
1544 if(httplibuse=="urllib" or httplibuse=="request"):
1545 try:
1546 geturls_request = Request(httpurl);
1547 if(httpmethod=="GET"):
1548 geturls_text = geturls_opener.open(geturls_request);
1549 elif(httpmethod=="POST"):
1550 geturls_text = geturls_opener.open(geturls_request, data=postdata);
1551 else:
1552 geturls_text = geturls_opener.open(geturls_request);
1553 except HTTPError as geturls_text_error:
1554 geturls_text = geturls_text_error;
1555 log.info("Error With URL "+httpurl);
1556 except URLError:
1557 log.info("Error With URL "+httpurl);
1558 return False;
1559 except socket.timeout:
1560 log.info("Error With URL "+httpurl);
1561 return False;
1562 except socket.timeout:
1563 log.info("Error With URL "+httpurl);
1564 return False;
1565 httpcodeout = geturls_text.getcode();
1566 try:
1567 httpcodereason = geturls_text.reason;
1568 except AttributeError:
1569 httpcodereason = http_status_to_reason(geturls_text.getcode());
1570 try:
1571 httpversionout = geturls_text.version;
1572 except AttributeError:
1573 httpversionout = "1.1";
1574 httpmethodout = geturls_request.get_method();
1575 httpurlout = geturls_text.geturl();
1576 httpheaderout = geturls_text.info();
1577 httpheadersentout = httpheaders;
1578 elif(httplibuse=="httplib"):
1579 if(urlparts[0]=="http"):
1580 httpconn = HTTPConnection(urlparts[1], timeout=timeout);
1581 elif(urlparts[0]=="https"):
1582 httpconn = HTTPSConnection(urlparts[1], timeout=timeout);
1583 else:
1584 return False;
1585 if(postdata is not None and not isinstance(postdata, dict)):
1586 postdata = urlencode(postdata);
1587 try:
1588 if(httpmethod=="GET"):
1589 httpconn.request("GET", urlparts[2], headers=httpheaders);
1590 elif(httpmethod=="POST"):
1591 httpconn.request("GET", urlparts[2], body=postdata, headers=httpheaders);
1592 else:
1593 httpconn.request("GET", urlparts[2], headers=httpheaders);
1594 except socket.timeout:
1595 log.info("Error With URL "+httpurl);
1596 return False;
1597 except socket.gaierror:
1598 log.info("Error With URL "+httpurl);
1599 return False;
1600 except BlockingIOError:
1601 log.info("Error With URL "+httpurl);
1602 return False;
1603 geturls_text = httpconn.getresponse();
1604 httpcodeout = geturls_text.status;
1605 httpcodereason = geturls_text.reason;
1606 if(geturls_text.version=="10"):
1607 httpversionout = "1.0";
1608 else:
1609 httpversionout = "1.1";
1610 httpmethodout = geturls_text._method;
1611 httpurlout = httpurl;
1612 httpheaderout = geturls_text.getheaders();
1613 httpheadersentout = httpheaders;
1614 elif(httplibuse=="httplib2"):
1615 try:
1616 if(httpmethod=="GET"):
1617 httpconn.request("GET", urlparts[2], headers=httpheaders);
1618 elif(httpmethod=="POST"):
1619 httpconn.request("GET", urlparts[2], body=postdata, headers=httpheaders);
1620 else:
1621 httpconn.request("GET", urlparts[2], headers=httpheaders);
1622 except socket.timeout:
1623 log.info("Error With URL "+httpurl);
1624 return False;
1625 except socket.gaierror:
1626 log.info("Error With URL "+httpurl);
1627 return False;
1628 except BlockingIOError:
1629 log.info("Error With URL "+httpurl);
1630 return False;
1631 geturls_text = httpconn.getresponse();
1632 httpcodeout = geturls_text.status;
1633 httpcodereason = geturls_text.reason;
1634 if(geturls_text.version=="10"):
1635 httpversionout = "1.0";
1636 else:
1637 httpversionout = "1.1";
1638 httpmethodout = httpmethod;
1639 httpurlout = geturls_text.geturl();
1640 httpheaderout = geturls_text.getheaders();
1641 httpheadersentout = httpheaders;
1642 elif(httplibuse=="urllib3" or httplibuse=="request3"):
1643 timeout = urllib3.util.Timeout(connect=timeout, read=timeout);
1644 urllib_pool = urllib3.PoolManager(headers=httpheaders, timeout=timeout);
1645 try:
1646 if(httpmethod=="GET"):
1647 geturls_text = urllib_pool.request("GET", httpurl, headers=httpheaders, preload_content=False);
1648 elif(httpmethod=="POST"):
1649 geturls_text = urllib_pool.request("POST", httpurl, body=postdata, headers=httpheaders, preload_content=False);
1650 else:
1651 geturls_text = urllib_pool.request("GET", httpurl, headers=httpheaders, preload_content=False);
1652 except urllib3.exceptions.ConnectTimeoutError:
1653 log.info("Error With URL "+httpurl);
1654 return False;
1655 except urllib3.exceptions.ConnectError:
1656 log.info("Error With URL "+httpurl);
1657 return False;
1658 except urllib3.exceptions.MaxRetryError:
1659 log.info("Error With URL "+httpurl);
1660 return False;
1661 except socket.timeout:
1662 log.info("Error With URL "+httpurl);
1663 return False;
1664 except ValueError:
1665 log.info("Error With URL "+httpurl);
1666 return False;
1667 httpcodeout = geturls_text.status;
1668 httpcodereason = geturls_text.reason;
1669 if(geturls_text.version=="10"):
1670 httpversionout = "1.0";
1671 else:
1672 httpversionout = "1.1";
1673 httpmethodout = httpmethod;
1674 httpurlout = geturls_text.geturl();
1675 httpheaderout = geturls_text.info();
1676 httpheadersentout = httpheaders;
1677 elif(httplibuse=="requests"):
1678 try:
1679 reqsession = requests.Session();
1680 if(httpmethod=="GET"):
1681 geturls_text = reqsession.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie, stream=True);
1682 elif(httpmethod=="POST"):
1683 geturls_text = reqsession.post(httpurl, timeout=timeout, data=postdata, headers=httpheaders, cookies=httpcookie, stream=True);
1684 else:
1685 geturls_text = reqsession.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie, stream=True);
1686 except requests.exceptions.ConnectTimeout:
1687 log.info("Error With URL "+httpurl);
1688 return False;
1689 except requests.exceptions.ConnectError:
1690 log.info("Error With URL "+httpurl);
1691 return False;
1692 except socket.timeout:
1693 log.info("Error With URL "+httpurl);
1694 return False;
1695 httpcodeout = geturls_text.status_code;
1696 httpcodereason = geturls_text.reason;
1697 if(geturls_text.raw.version=="10"):
1698 httpversionout = "1.0";
1699 else:
1700 httpversionout = "1.1";
1701 httpmethodout = httpmethod;
1702 httpurlout = geturls_text.url;
1703 httpheaderout = geturls_text.headers;
1704 httpheadersentout = geturls_text.request.headers;
1705 elif(httplibuse=="httpx"):
1706 try:
1707 if(httpmethod=="GET"):
1708 httpx_pool = httpx.Client(http1=True, http2=False, trust_env=True);
1709 geturls_text = httpx_pool.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
1710 elif(httpmethod=="POST"):
1711 httpx_pool = httpx.Client(http1=True, http2=False, trust_env=True);
1712 geturls_text = httpx_pool.post(httpurl, timeout=timeout, data=postdata, headers=httpheaders, cookies=httpcookie);
1713 else:
1714 httpx_pool = httpx.Client(http1=True, http2=False, trust_env=True);
1715 geturls_text = httpx_pool.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
1716 except httpx.ConnectTimeout:
1717 log.info("Error With URL "+httpurl);
1718 return False;
1719 except httpx.ConnectError:
1720 log.info("Error With URL "+httpurl);
1721 return False;
1722 except socket.timeout:
1723 log.info("Error With URL "+httpurl);
1724 return False;
1725 httpcodeout = geturls_text.status_code;
1726 httpcodereason = geturls_text.reason_phrase;
1727 httpversionout = geturls_text.http_version;
1728 httpmethodout = httpmethod;
1729 httpurlout = str(geturls_text.url);
1730 httpheaderout = geturls_text.headers;
1731 httpheadersentout = geturls_text.request.headers;
1732 elif(httplibuse=="httpx2"):
1733 try:
1734 if(httpmethod=="GET"):
1735 httpx_pool = httpx.Client(http1=True, http2=True, trust_env=True);
1736 geturls_text = httpx_pool.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
1737 elif(httpmethod=="POST"):
1738 httpx_pool = httpx.Client(http1=True, http2=True, trust_env=True);
1739 geturls_text = httpx_pool.post(httpurl, timeout=timeout, data=postdata, headers=httpheaders, cookies=httpcookie);
1740 else:
1741 httpx_pool = httpx.Client(http1=True, http2=True, trust_env=True);
1742 geturls_text = httpx_pool.get(httpurl, timeout=timeout, headers=httpheaders, cookies=httpcookie);
1743 except httpx.ConnectTimeout:
1744 log.info("Error With URL "+httpurl);
1745 return False;
1746 except httpx.ConnectError:
1747 log.info("Error With URL "+httpurl);
1748 return False;
1749 except socket.timeout:
1750 log.info("Error With URL "+httpurl);
1751 return False;
1752 httpcodeout = geturls_text.status_code;
1753 httpcodereason = geturls_text.reason_phrase;
1754 httpversionout = geturls_text.http_version;
1755 httpmethodout = httpmethod;
1756 httpurlout = str(geturls_text.url);
1757 httpheaderout = geturls_text.headers;
1758 httpheadersentout = geturls_text.request.headers;
1759 elif(httplibuse=="httpcore"):
1760 try:
1761 if(httpmethod=="GET"):
1762 httpx_pool = httpcore.ConnectionPool(http1=True, http2=False);
1763 geturls_text = httpx_pool.request("GET", httpurl, headers=httpheaders);
1764 elif(httpmethod=="POST"):
1765 httpx_pool = httpcore.ConnectionPool(http1=True, http2=False);
1766 geturls_text = httpx_pool.request("GET", httpurl, data=postdata, headers=httpheaders);
1767 else:
1768 httpx_pool = httpcore.ConnectionPool(http1=True, http2=False);
1769 geturls_text = httpx_pool.request("GET", httpurl, headers=httpheaders);
1770 except httpcore.ConnectTimeout:
1771 log.info("Error With URL "+httpurl);
1772 return False;
1773 except httpcore.ConnectError:
1774 log.info("Error With URL "+httpurl);
1775 return False;
1776 except socket.timeout:
1777 log.info("Error With URL "+httpurl);
1778 return False;
1779 httpcodeout = geturls_text.status;
1780 httpcodereason = http_status_to_reason(geturls_text.status);
1781 httpversionout = "1.1";
1782 httpmethodout = httpmethod;
1783 httpurlout = str(httpurl);
1784 httpheaderout = geturls_text.headers;
1785 httpheadersentout = httpheaders;
1786 elif(httplibuse=="httpcore2"):
1787 try:
1788 if(httpmethod=="GET"):
1789 httpx_pool = httpcore.ConnectionPool(http1=True, http2=True);
1790 geturls_text = httpx_pool.request("GET", httpurl, headers=httpheaders);
1791 elif(httpmethod=="POST"):
1792 httpx_pool = httpcore.ConnectionPool(http1=True, http2=True);
1793 geturls_text = httpx_pool.request("GET", httpurl, data=postdata, headers=httpheaders);
1794 else:
1795 httpx_pool = httpcore.ConnectionPool(http1=True, http2=True);
1796 geturls_text = httpx_pool.request("GET", httpurl, headers=httpheaders);
1797 except httpcore.ConnectTimeout:
1798 log.info("Error With URL "+httpurl);
1799 return False;
1800 except httpcore.ConnectError:
1801 log.info("Error With URL "+httpurl);
1802 return False;
1803 except socket.timeout:
1804 log.info("Error With URL "+httpurl);
1805 return False;
1806 httpcodeout = geturls_text.status;
1807 httpcodereason = geturls_text.reason_phrase;
1808 httpversionout = "1.1";
1809 httpmethodout = httpmethod;
1810 httpurlout = str(httpurl);
1811 httpheaderout = geturls_text.headers;
1812 httpheadersentout = httpheaders;
1813 elif(httplibuse=="mechanize"):
1814 geturls_opener = mechanize.Browser();
1815 if(isinstance(httpheaders, dict)):
1816 httpheaders = make_http_headers_from_dict_to_list(httpheaders);
1817 time.sleep(sleep);
1818 geturls_opener.addheaders = httpheaders;
1819 geturls_opener.set_cookiejar(httpcookie);
1820 geturls_opener.set_handle_robots(False);
1821 if(postdata is not None and not isinstance(postdata, dict)):
1822 postdata = urlencode(postdata);
1823 try:
1824 if(httpmethod=="GET"):
1825 geturls_text = geturls_opener.open(httpurl);
1826 elif(httpmethod=="POST"):
1827 geturls_text = geturls_opener.open(httpurl, data=postdata);
1828 else:
1829 geturls_text = geturls_opener.open(httpurl);
1830 except mechanize.HTTPError as geturls_text_error:
1831 geturls_text = geturls_text_error;
1832 log.info("Error With URL "+httpurl);
1833 except URLError:
1834 log.info("Error With URL "+httpurl);
1835 return False;
1836 except socket.timeout:
1837 log.info("Error With URL "+httpurl);
1838 return False;
1839 httpcodeout = geturls_text.code;
1840 httpcodereason = geturls_text.msg;
1841 httpversionout = "1.1";
1842 httpmethodout = httpmethod;
1843 httpurlout = geturls_text.geturl();
1844 httpheaderout = geturls_text.info();
1845 reqhead = geturls_opener.request;
1846 httpheadersentout = reqhead.header_items();
1847 elif(httplibuse=="pycurl"):
1848 retrieved_body = BytesIO();
1849 retrieved_headers = BytesIO();
1850 try:
1851 if(httpmethod=="GET"):
1852 geturls_text = pycurl.Curl();
1853 geturls_text.setopt(geturls_text.URL, httpurl);
1854 geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_1_1);
1855 geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
1856 geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
1857 geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
1858 geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
1859 geturls_text.setopt(geturls_text.TIMEOUT, timeout);
1860 geturls_text.perform();
1861 elif(httpmethod=="POST"):
1862 geturls_text = pycurl.Curl();
1863 geturls_text.setopt(geturls_text.URL, httpurl);
1864 geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_1_1);
1865 geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
1866 geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
1867 geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
1868 geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
1869 geturls_text.setopt(geturls_text.TIMEOUT, timeout);
1870 geturls_text.setopt(geturls_text.POST, True);
1871 geturls_text.setopt(geturls_text.POSTFIELDS, postdata);
1872 geturls_text.perform();
1873 else:
1874 geturls_text = pycurl.Curl();
1875 geturls_text.setopt(geturls_text.URL, httpurl);
1876 geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_1_1);
1877 geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
1878 geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
1879 geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
1880 geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
1881 geturls_text.setopt(geturls_text.TIMEOUT, timeout);
1882 geturls_text.perform();
1883 retrieved_headers.seek(0);
1884 if(sys.version[0]=="2"):
1885 pycurlhead = retrieved_headers.read();
1886 if(sys.version[0]>="3"):
1887 pycurlhead = retrieved_headers.read().decode('UTF-8');
1888 pyhttpverinfo = re.findall(r'^HTTP/([0-9.]+) (\d+) ([A-Za-z\s]+)$', pycurlhead.splitlines()[0])[0];
1889 pycurlheadersout = make_http_headers_from_pycurl_to_dict(pycurlhead);
1890 retrieved_body.seek(0);
1891 except socket.timeout:
1892 log.info("Error With URL "+httpurl);
1893 return False;
1894 except socket.gaierror:
1895 log.info("Error With URL "+httpurl);
1896 return False;
1897 except ValueError:
1898 log.info("Error With URL "+httpurl);
1899 return False;
1900 httpcodeout = geturls_text.getinfo(geturls_text.HTTP_CODE);
1901 httpcodereason = http_status_to_reason(geturls_text.getinfo(geturls_text.HTTP_CODE));
1902 httpversionout = "1.1";
1903 httpmethodout = httpmethod;
1904 httpurlout = geturls_text.getinfo(geturls_text.EFFECTIVE_URL);
1905 httpheaderout = pycurlheadersout;
1906 httpheadersentout = httpheaders;
1907 elif(httplibuse=="pycurl2"):
1908 retrieved_body = BytesIO();
1909 retrieved_headers = BytesIO();
1910 try:
1911 if(httpmethod=="GET"):
1912 geturls_text = pycurl.Curl();
1913 geturls_text.setopt(geturls_text.URL, httpurl);
1914 geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_2_0);
1915 geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
1916 geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
1917 geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
1918 geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
1919 geturls_text.setopt(geturls_text.TIMEOUT, timeout);
1920 geturls_text.perform();
1921 elif(httpmethod=="POST"):
1922 geturls_text = pycurl.Curl();
1923 geturls_text.setopt(geturls_text.URL, httpurl);
1924 geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_2_0);
1925 geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
1926 geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
1927 geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
1928 geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
1929 geturls_text.setopt(geturls_text.TIMEOUT, timeout);
1930 geturls_text.setopt(geturls_text.POST, True);
1931 geturls_text.setopt(geturls_text.POSTFIELDS, postdata);
1932 geturls_text.perform();
1933 else:
1934 geturls_text = pycurl.Curl();
1935 geturls_text.setopt(geturls_text.URL, httpurl);
1936 geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_2_0);
1937 geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
1938 geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
1939 geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
1940 geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
1941 geturls_text.setopt(geturls_text.TIMEOUT, timeout);
1942 geturls_text.perform();
1943 retrieved_headers.seek(0);
1944 if(sys.version[0]=="2"):
1945 pycurlhead = retrieved_headers.read();
1946 if(sys.version[0]>="3"):
1947 pycurlhead = retrieved_headers.read().decode('UTF-8');
1948 pyhttpverinfo = re.findall(r'^HTTP/([0-9.]+) (\d+) ([A-Za-z\s]+)$', pycurlhead.splitlines()[0])[0];
1949 pycurlheadersout = make_http_headers_from_pycurl_to_dict(pycurlhead);
1950 retrieved_body.seek(0);
1951 except socket.timeout:
1952 log.info("Error With URL "+httpurl);
1953 return False;
1954 except socket.gaierror:
1955 log.info("Error With URL "+httpurl);
1956 return False;
1957 except ValueError:
1958 log.info("Error With URL "+httpurl);
1959 return False;
1960 httpcodeout = geturls_text.getinfo(geturls_text.HTTP_CODE);
1961 httpcodereason = http_status_to_reason(geturls_text.getinfo(geturls_text.HTTP_CODE));
1962 httpversionout = "1.1";
1963 httpmethodout = httpmethod;
1964 httpurlout = geturls_text.getinfo(geturls_text.EFFECTIVE_URL);
1965 httpheaderout = pycurlheadersout;
1966 httpheadersentout = httpheaders;
1967 elif(httplibuse=="pycurl3"):
1968 retrieved_body = BytesIO();
1969 retrieved_headers = BytesIO();
1970 try:
1971 if(httpmethod=="GET"):
1972 geturls_text = pycurl.Curl();
1973 geturls_text.setopt(geturls_text.URL, httpurl);
1974 geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_3_0);
1975 geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
1976 geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
1977 geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
1978 geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
1979 geturls_text.setopt(geturls_text.TIMEOUT, timeout);
1980 geturls_text.perform();
1981 elif(httpmethod=="POST"):
1982 geturls_text = pycurl.Curl();
1983 geturls_text.setopt(geturls_text.URL, httpurl);
1984 geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_3_0);
1985 geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
1986 geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
1987 geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
1988 geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
1989 geturls_text.setopt(geturls_text.TIMEOUT, timeout);
1990 geturls_text.setopt(geturls_text.POST, True);
1991 geturls_text.setopt(geturls_text.POSTFIELDS, postdata);
1992 geturls_text.perform();
1993 else:
1994 geturls_text = pycurl.Curl();
1995 geturls_text.setopt(geturls_text.URL, httpurl);
1996 geturls_text.setopt(geturls_text.HTTP_VERSION, geturls_text.CURL_HTTP_VERSION_3_0);
1997 geturls_text.setopt(geturls_text.WRITEFUNCTION, retrieved_body.write);
1998 geturls_text.setopt(geturls_text.HTTPHEADER, httpheaders);
1999 geturls_text.setopt(geturls_text.HEADERFUNCTION, retrieved_headers.write);
2000 geturls_text.setopt(geturls_text.FOLLOWLOCATION, True);
2001 geturls_text.setopt(geturls_text.TIMEOUT, timeout);
2002 geturls_text.perform();
2003 retrieved_headers.seek(0);
2004 if(sys.version[0]=="2"):
2005 pycurlhead = retrieved_headers.read();
2006 if(sys.version[0]>="3"):
2007 pycurlhead = retrieved_headers.read().decode('UTF-8');
2008 pyhttpverinfo = re.findall(r'^HTTP/([0-9.]+) (\d+) ([A-Za-z\s]+)$', pycurlhead.splitlines()[0])[0];
2009 pycurlheadersout = make_http_headers_from_pycurl_to_dict(pycurlhead);
2010 retrieved_body.seek(0);
2011 except socket.timeout:
2012 log.info("Error With URL "+httpurl);
2013 return False;
2014 except socket.gaierror:
2015 log.info("Error With URL "+httpurl);
2016 return False;
2017 except ValueError:
2018 log.info("Error With URL "+httpurl);
2019 return False;
2020 httpcodeout = geturls_text.getinfo(geturls_text.HTTP_CODE);
2021 httpcodereason = http_status_to_reason(geturls_text.getinfo(geturls_text.HTTP_CODE));
2022 httpversionout = "1.1";
2023 httpmethodout = httpmethod;
2024 httpurlout = geturls_text.getinfo(geturls_text.EFFECTIVE_URL);
2025 httpheaderout = pycurlheadersout;
2026 httpheadersentout = httpheaders;
2027 elif(httplibuse=="ftp"):
2028 geturls_text = download_file_from_ftp_file(httpurl);
2029 if(not geturls_text):
2030 return False;
2031 geturls_text.seek(0, 2);
2032 downloadsize = int(geturls_text.tell());
2033 geturls_text.seek(0, 0);
2034 elif(httplibuse=="sftp"):
2035 geturls_text = download_file_from_sftp_file(httpurl);
2036 if(not geturls_text):
2037 return False;
2038 geturls_text.seek(0, 2);
2039 downloadsize = int(geturls_text.tell());
2040 geturls_text.seek(0, 0);
2041 if(downloadsize is not None):
2042 downloadsize = int(downloadsize);
2043 if downloadsize is None: downloadsize = 0;
2044 fulldatasize = 0;
2045 prevdownsize = 0;
2046 elif(httplibuse=="pysftp"):
2047 geturls_text = download_file_from_pysftp_file(httpurl);
2048 if(not geturls_text):
2049 return False;
2050 geturls_text.seek(0, 2);
2051 downloadsize = int(geturls_text.tell());
2052 geturls_text.seek(0, 0);
2053 else:
2054 returnval = False;
2055 if(isinstance(httpheaderout, list) and (httplibuse!="pycurl" and httplibuse!="pycurl2" and httplibuse!="pycurl3")):
2056 httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
2057 if(isinstance(httpheaderout, list) and (httplibuse=="pycurl" or httplibuse=="pycurl2" or httplibuse=="pycurl3")):
2058 httpheaderout = dict(make_http_headers_from_pycurl_to_dict("\r\n".join(httpheaderout)));
2059 if(sys.version[0]=="2"):
2060 try:
2061 prehttpheaderout = httpheaderout;
2062 httpheaderkeys = httpheaderout.keys();
2063 imax = len(httpheaderkeys);
2064 ic = 0;
2065 httpheaderout = {};
2066 while(ic < imax):
2067 httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
2068 ic += 1;
2069 except AttributeError:
2070 pass;
2071 httpheaderout = fix_header_names(httpheaderout);
2072 if(isinstance(httpheadersentout, list) and (httplibuse!="pycurl" and httplibuse!="pycurl2" and httplibuse!="pycurl3")):
2073 httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
2074 if(isinstance(httpheadersentout, list) and (httplibuse=="pycurl" or httplibuse=="pycurl2" or httplibuse=="pycurl3")):
2075 httpheadersentout = dict(make_http_headers_from_pycurl_to_dict("\r\n".join(httpheadersentout)));
2076 httpheadersentout = fix_header_names(httpheadersentout);
2077 if(httplibuse=="urllib" or httplibuse=="request" or httplibuse=="request3" or httplibuse=="httplib" or httplibuse=="httplib2" or httplibuse=="urllib3" or httplibuse=="requests" or httplibuse=="mechanize" or httplibuse=="httpx" or httplibuse=="httpx2" or httplibuse=="httpcore" or httplibuse=="httpcore2"):
2078 downloadsize = httpheaderout.get('Content-Length');
2079 if(downloadsize is not None):
2080 downloadsize = int(downloadsize);
2081 if downloadsize is None: downloadsize = 0;
2082 fulldatasize = 0;
2083 prevdownsize = 0;
2084 log.info("Downloading URL "+httpurl);
2085 if(httplibuse=="urllib" or httplibuse=="request" or httplibuse=="request3" or httplibuse=="httplib" or httplibuse=="httplib2" or httplibuse=="urllib3" or httplibuse=="mechanize" or httplibuse=="httpx" or httplibuse=="httpx2" or httplibuse=="httpcore" or httplibuse=="httpcore2" or httplibuse=="ftp" or httplibuse=="sftp" or httplibuse=="pysftp"):
2086 with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
2087 tmpfilename = f.name;
2088 try:
2089 os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple())));
2090 except AttributeError:
2091 try:
2092 os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
2093 except ValueError:
2094 pass;
2095 except ValueError:
2096 pass;
2097 returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason};
2098 while True:
2099 databytes = geturls_text.read(buffersize);
2100 if not databytes: break;
2101 datasize = len(databytes);
2102 fulldatasize = datasize + fulldatasize;
2103 percentage = "";
2104 if(downloadsize>0):
2105 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
2106 downloaddiff = fulldatasize - prevdownsize;
2107 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
2108 prevdownsize = fulldatasize;
2109 f.write(databytes);
2110 f.close();
2111 elif(httplibuse=="requests"):
2112 with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
2113 tmpfilename = f.name;
2114 try:
2115 os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple())));
2116 except AttributeError:
2117 try:
2118 os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
2119 except ValueError:
2120 pass;
2121 except ValueError:
2122 pass;
2123 returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason};
2124 while True:
2125 databytes = geturls_text.raw.read(buffersize);
2126 if not databytes: break;
2127 datasize = len(databytes);
2128 fulldatasize = datasize + fulldatasize;
2129 percentage = "";
2130 if(downloadsize>0):
2131 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
2132 downloaddiff = fulldatasize - prevdownsize;
2133 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
2134 prevdownsize = fulldatasize;
2135 f.write(databytes);
2136 f.close();
2137 elif(httplibuse=="pycurl" or httplibuse=="pycurl2" or httplibuse=="pycurl3"):
2138 with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
2139 tmpfilename = f.name;
2140 try:
2141 os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple())));
2142 except AttributeError:
2143 try:
2144 os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
2145 except ValueError:
2146 pass;
2147 except ValueError:
2148 pass;
2149 returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout, 'Reason': httpcodereason};
2150 while True:
2151 databytes = retrieved_body.read(buffersize);
2152 if not databytes: break;
2153 datasize = len(databytes);
2154 fulldatasize = datasize + fulldatasize;
2155 percentage = "";
2156 if(downloadsize>0):
2157 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
2158 downloaddiff = fulldatasize - prevdownsize;
2159 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
2160 prevdownsize = fulldatasize;
2161 f.write(databytes);
2162 f.close();
2163 else:
2164 pass;
2165 geturls_text.close();
2166 exec_time_end = time.time();
2167 log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to download file.");
2168 returnval.update({'Filesize': os.path.getsize(tmpfilename), 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)});
2169 return returnval;
def download_from_url_to_file(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    """Download httpurl with the backend named by httplibuse and deliver the result.

    When outfile is a filename, the temporary download is moved to
    outpath/outfile and a 'Type': "File" dict describing the file is returned.
    When outfile is "-", the downloaded bytes are read back into memory and a
    'Type': "Content" dict holding the data is returned.  Returns False when
    the download fails or the target path is unusable.
    """
    global geturls_download_sleep, haverequests, havemechanize, havepycurl, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp;
    if(sleep<0):
        sleep = geturls_download_sleep;
    if(timeout<=0):
        timeout = 10;
    # Collapse backend aliases, then fall back to an always-available backend
    # when the requested optional library was not importable at load time.
    if(httplibuse=="urllib1" or httplibuse=="urllib2" or httplibuse=="request"):
        httplibuse = "urllib";
    if(httplibuse=="httplib1"):
        httplibuse = "httplib";
    if(not haverequests and httplibuse=="requests"):
        httplibuse = "urllib";
    if(not havehttpx and httplibuse=="httpx"):
        httplibuse = "urllib";
    if(not havehttpx and httplibuse=="httpx2"):
        httplibuse = "urllib";
    if(not havehttpcore and httplibuse=="httpcore"):
        httplibuse = "urllib";
    if(not havehttpcore and httplibuse=="httpcore2"):
        httplibuse = "urllib";
    if(not havemechanize and httplibuse=="mechanize"):
        httplibuse = "urllib";
    if(not havepycurl and httplibuse=="pycurl"):
        httplibuse = "urllib";
    if(not havepycurl and httplibuse=="pycurl2"):
        httplibuse = "urllib";
    if(not havepycurl and httplibuse=="pycurl3"):
        httplibuse = "urllib";
    if(not havehttplib2 and httplibuse=="httplib2"):
        httplibuse = "httplib";
    if(not haveparamiko and httplibuse=="sftp"):
        httplibuse = "ftp";
    if(not havepysftp and httplibuse=="pysftp"):
        httplibuse = "ftp";
    if(not outfile=="-"):
        outpath = outpath.rstrip(os.path.sep);
        filepath = os.path.realpath(outpath+os.path.sep+outfile);
        if(not os.path.exists(outpath)):
            os.makedirs(outpath);
        if(os.path.exists(outpath) and os.path.isfile(outpath)):
            return False;
        if(os.path.exists(filepath) and os.path.isdir(filepath)):
            return False;
        pretmpfilename = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, httplibuse, ranges, buffersize[0], sleep, timeout);
        if(not pretmpfilename):
            return False;
        tmpfilename = pretmpfilename['Filename'];
        downloadsize = int(os.path.getsize(tmpfilename));
        fulldatasize = 0;
        log.info("Moving file "+tmpfilename+" to "+filepath);
        exec_time_start = time.time();
        shutil.move(tmpfilename, filepath);
        # Best-effort: stamp the moved file with the server's Last-Modified time.
        # parsedate_to_datetime may be absent on old Pythons (AttributeError) and
        # strptime may reject unexpected date formats (ValueError).
        try:
            os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())));
        except AttributeError:
            try:
                os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
            except ValueError:
                pass;
        except ValueError:
            pass;
        exec_time_end = time.time();
        log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to move file.");
        if(os.path.exists(tmpfilename)):
            os.remove(tmpfilename);
        # Fix: the dict literal previously listed 'Method' twice; only the last
        # entry ('Method': httpmethod) took effect, so the dead duplicate is removed.
        returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string(exec_time_start - exec_time_end), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code'], 'Reason': pretmpfilename['Reason']};
    if(outfile=="-"):
        pretmpfilename = download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, httplibuse, ranges, buffersize[0], sleep, timeout);
        # Fix: guard was missing here (present in the file branch above); a failed
        # download would otherwise crash on the subscript below.
        if(not pretmpfilename):
            return False;
        tmpfilename = pretmpfilename['Filename'];
        downloadsize = int(os.path.getsize(tmpfilename));
        fulldatasize = 0;
        prevdownsize = 0;
        exec_time_start = time.time();
        with open(tmpfilename, 'rb') as ft:
            f = BytesIO();
            while True:
                databytes = ft.read(buffersize[1]);
                if not databytes: break;
                datasize = len(databytes);
                fulldatasize = datasize + fulldatasize;
                percentage = "";
                if(downloadsize>0):
                    percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                downloaddiff = fulldatasize - prevdownsize;
                log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                prevdownsize = fulldatasize;
                f.write(databytes);
            f.seek(0);
            fdata = f.getvalue();
            f.close();
            ft.close();
        os.remove(tmpfilename);
        exec_time_end = time.time();
        log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to copy file.");
        # Fix: duplicate 'Method' key removed here as well (last entry kept).
        returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string(exec_time_start - exec_time_end), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code'], 'Reason': pretmpfilename['Reason']};
    return returnval;
def download_from_url_with_urllib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    """Fetch httpurl via download_from_url, pinned to the "urllib" backend."""
    return download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib", buffersize, sleep, timeout)
def download_from_url_with_request(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    """Fetch httpurl via download_from_url using "urllib" ("request" is an alias for it)."""
    return download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib", buffersize, sleep, timeout)
def download_from_url_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    """Fetch httpurl via download_from_url, pinned to the "request3" backend."""
    return download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "request3", buffersize, sleep, timeout)
def download_from_url_with_httplib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    """Fetch httpurl via download_from_url, pinned to the "httplib" backend."""
    return download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httplib", buffersize, sleep, timeout)
def download_from_url_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    """Fetch httpurl via download_from_url, pinned to the "httplib2" backend."""
    return download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httplib2", buffersize, sleep, timeout)
def download_from_url_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    """Fetch httpurl via download_from_url, pinned to the "urllib3" backend."""
    return download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib3", buffersize, sleep, timeout)
def download_from_url_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    """Fetch httpurl via download_from_url, pinned to the "requests" backend."""
    return download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "requests", buffersize, sleep, timeout)
def download_from_url_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    """Fetch httpurl via download_from_url, pinned to the "httpx" backend."""
    return download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpx", buffersize, sleep, timeout)
def download_from_url_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    """Fetch httpurl via download_from_url, pinned to the "httpx2" backend."""
    return download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpx2", buffersize, sleep, timeout)
def download_from_url_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    """Fetch httpurl via download_from_url, pinned to the "httpcore" backend."""
    return download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpcore", buffersize, sleep, timeout)
def download_from_url_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    """Fetch httpurl via download_from_url, pinned to the "httpcore2" backend."""
    return download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpcore2", buffersize, sleep, timeout)
def download_from_url_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    """Fetch httpurl via download_from_url, pinned to the "mechanize" backend."""
    return download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "mechanize", buffersize, sleep, timeout)
def download_from_url_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    """Fetch httpurl via download_from_url, pinned to the "pycurl" backend."""
    return download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl", buffersize, sleep, timeout)
def download_from_url_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    """Fetch httpurl via download_from_url, pinned to the "pycurl2" backend."""
    return download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl2", buffersize, sleep, timeout)
def download_from_url_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    """Fetch httpurl via download_from_url, pinned to the "pycurl3" backend."""
    return download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl3", buffersize, sleep, timeout)
def download_from_url_with_ftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    """Fetch httpurl via download_from_url, pinned to the "ftp" backend."""
    return download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "ftp", buffersize, sleep, timeout)
def download_from_url_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    """Fetch httpurl via download_from_url, pinned to the "sftp" backend."""
    return download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "sftp", buffersize, sleep, timeout)
def download_from_url_with_pysftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1, timeout=10):
    """Fetch httpurl via download_from_url, pinned to the "pysftp" backend."""
    return download_from_url(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pysftp", buffersize, sleep, timeout)
def download_from_url_file_with_urllib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    """Download httpurl to a temp file via download_from_url_file with the "urllib" backend."""
    return download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib", ranges, buffersize, sleep, timeout)
def download_from_url_file_with_request(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    """Download httpurl to a temp file via download_from_url_file using "urllib" ("request" aliases it)."""
    return download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib", ranges, buffersize, sleep, timeout)
def download_from_url_file_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    """Download httpurl to a temp file via download_from_url_file with the "request3" backend."""
    return download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "request3", ranges, buffersize, sleep, timeout)
def download_from_url_file_with_httplib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    """Download httpurl to a temp file via download_from_url_file with the "httplib" backend."""
    return download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httplib", ranges, buffersize, sleep, timeout)
def download_from_url_file_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    """Download httpurl to a temp file via download_from_url_file with the "httplib2" backend."""
    return download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httplib2", ranges, buffersize, sleep, timeout)
def download_from_url_file_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    """Download httpurl to a temp file via download_from_url_file with the "urllib3" backend."""
    return download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib3", ranges, buffersize, sleep, timeout)
def download_from_url_file_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    """Download httpurl to a temp file via download_from_url_file with the "requests" backend."""
    return download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "requests", ranges, buffersize, sleep, timeout)
def download_from_url_file_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    """Download httpurl to a temp file via download_from_url_file with the "httpx" backend."""
    return download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpx", ranges, buffersize, sleep, timeout)
def download_from_url_file_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    """Download httpurl to a temp file via download_from_url_file with the "httpx2" backend."""
    return download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpx2", ranges, buffersize, sleep, timeout)
def download_from_url_file_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    """Download httpurl to a temp file via download_from_url_file with the "httpcore" backend."""
    return download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpcore", ranges, buffersize, sleep, timeout)
def download_from_url_file_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    """Download httpurl to a temp file via download_from_url_file with the "httpcore2" backend."""
    return download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpcore2", ranges, buffersize, sleep, timeout)
def download_from_url_file_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    """Download httpurl to a temp file via download_from_url_file with the "mechanize" backend."""
    return download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "mechanize", ranges, buffersize, sleep, timeout)
def download_from_url_file_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    """Download httpurl to a temp file via download_from_url_file with the "pycurl" backend."""
    return download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl", ranges, buffersize, sleep, timeout)
def download_from_url_file_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    """Download httpurl to a temp file via download_from_url_file with the "pycurl2" backend."""
    return download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl2", ranges, buffersize, sleep, timeout)
def download_from_url_file_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    """Download httpurl to a temp file via download_from_url_file with the "pycurl3" backend."""
    return download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl3", ranges, buffersize, sleep, timeout)
def download_from_url_file_with_ftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    """Download httpurl to a temp file via download_from_url_file with the "ftp" backend."""
    return download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "ftp", ranges, buffersize, sleep, timeout)
def download_from_url_file_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    """Download httpurl to a temp file via download_from_url_file with the "sftp" backend."""
    return download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "sftp", ranges, buffersize, sleep, timeout)
def download_from_url_file_with_pysftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, ranges=[None, None], buffersize=524288, sleep=-1, timeout=10):
    """Download httpurl to a temp file via download_from_url_file with the "pysftp" backend."""
    return download_from_url_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pysftp", ranges, buffersize, sleep, timeout)
def download_from_url_to_file_with_urllib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    """Save httpurl to outpath/outfile via download_from_url_to_file using the "urllib" backend."""
    return download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib", outfile, outpath, ranges, buffersize, sleep, timeout)
def download_from_url_to_file_with_request(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    """Save httpurl to outpath/outfile via download_from_url_to_file using the "request" backend alias."""
    return download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "request", outfile, outpath, ranges, buffersize, sleep, timeout)
def download_from_url_to_file_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    """Save httpurl to outpath/outfile via download_from_url_to_file using the "request3" backend.

    Fix: this wrapper previously passed "urllib", unlike its siblings
    download_from_url_with_request3 and download_from_url_file_with_request3,
    which both pass "request3" (a backend value the dispatcher recognizes).
    """
    return download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "request3", outfile, outpath, ranges, buffersize, sleep, timeout)
def download_from_url_to_file_with_httplib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    """Save httpurl to outpath/outfile via download_from_url_to_file using the "httplib" backend."""
    return download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httplib", outfile, outpath, ranges, buffersize, sleep, timeout)
def download_from_url_to_file_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    """Save httpurl to outpath/outfile via download_from_url_to_file using the "httplib2" backend."""
    return download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httplib2", outfile, outpath, ranges, buffersize, sleep, timeout)
def download_from_url_to_file_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=[None, None], buffersize=[524288, 524288], sleep=-1, timeout=10):
    """Save httpurl to outpath/outfile via download_from_url_to_file using the "urllib3" backend."""
    return download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "urllib3", outfile, outpath, ranges, buffersize, sleep, timeout)
def download_from_url_to_file_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=None, buffersize=None, sleep=-1, timeout=10):
    """Download httpurl to a file using the "requests" backend; see download_from_url_to_file()."""
    # None defaults avoid sharing one mutable list across all calls.
    if(ranges is None):
        ranges = [None, None];
    if(buffersize is None):
        buffersize = [524288, 524288];
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "requests", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;
def download_from_url_to_file_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=None, buffersize=None, sleep=-1, timeout=10):
    """Download httpurl to a file using the "httpx" backend; see download_from_url_to_file()."""
    # None defaults avoid sharing one mutable list across all calls.
    if(ranges is None):
        ranges = [None, None];
    if(buffersize is None):
        buffersize = [524288, 524288];
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpx", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;
def download_from_url_to_file_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=None, buffersize=None, sleep=-1, timeout=10):
    """Download httpurl to a file using the "httpx2" backend; see download_from_url_to_file()."""
    # None defaults avoid sharing one mutable list across all calls.
    if(ranges is None):
        ranges = [None, None];
    if(buffersize is None):
        buffersize = [524288, 524288];
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpx2", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;
def download_from_url_to_file_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=None, buffersize=None, sleep=-1, timeout=10):
    """Download httpurl to a file using the "httpcore" backend; see download_from_url_to_file()."""
    # None defaults avoid sharing one mutable list across all calls.
    if(ranges is None):
        ranges = [None, None];
    if(buffersize is None):
        buffersize = [524288, 524288];
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpcore", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;
def download_from_url_to_file_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=None, buffersize=None, sleep=-1, timeout=10):
    """Download httpurl to a file using the "httpcore2" backend; see download_from_url_to_file()."""
    # None defaults avoid sharing one mutable list across all calls.
    if(ranges is None):
        ranges = [None, None];
    if(buffersize is None):
        buffersize = [524288, 524288];
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "httpcore2", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;
def download_from_url_to_file_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=None, buffersize=None, sleep=-1, timeout=10):
    """Download httpurl to a file using the "mechanize" backend; see download_from_url_to_file()."""
    # None defaults avoid sharing one mutable list across all calls.
    if(ranges is None):
        ranges = [None, None];
    if(buffersize is None):
        buffersize = [524288, 524288];
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "mechanize", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;
def download_from_url_to_file_with_pycurl(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=None, buffersize=None, sleep=-1, timeout=10):
    """Download httpurl to a file using the "pycurl" backend; see download_from_url_to_file()."""
    # None defaults avoid sharing one mutable list across all calls.
    if(ranges is None):
        ranges = [None, None];
    if(buffersize is None):
        buffersize = [524288, 524288];
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;
def download_from_url_to_file_with_pycurl2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=None, buffersize=None, sleep=-1, timeout=10):
    """Download httpurl to a file using the "pycurl2" backend; see download_from_url_to_file()."""
    # None defaults avoid sharing one mutable list across all calls.
    if(ranges is None):
        ranges = [None, None];
    if(buffersize is None):
        buffersize = [524288, 524288];
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl2", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;
def download_from_url_to_file_with_pycurl3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=None, buffersize=None, sleep=-1, timeout=10):
    """Download httpurl to a file using the "pycurl3" backend; see download_from_url_to_file()."""
    # None defaults avoid sharing one mutable list across all calls.
    if(ranges is None):
        ranges = [None, None];
    if(buffersize is None):
        buffersize = [524288, 524288];
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pycurl3", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;
def download_from_url_to_file_with_ftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=None, buffersize=None, sleep=-1, timeout=10):
    """Download httpurl to a file using the "ftp" backend; see download_from_url_to_file()."""
    # None defaults avoid sharing one mutable list across all calls.
    if(ranges is None):
        ranges = [None, None];
    if(buffersize is None):
        buffersize = [524288, 524288];
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "ftp", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;
def download_from_url_to_file_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=None, buffersize=None, sleep=-1, timeout=10):
    """Download httpurl to a file using the "sftp" backend; see download_from_url_to_file()."""
    # None defaults avoid sharing one mutable list across all calls.
    if(ranges is None):
        ranges = [None, None];
    if(buffersize is None):
        buffersize = [524288, 524288];
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "sftp", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;
def download_from_url_to_file_with_pysftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), ranges=None, buffersize=None, sleep=-1, timeout=10):
    """Download httpurl to a file using the "pysftp" backend; see download_from_url_to_file()."""
    # None defaults avoid sharing one mutable list across all calls.
    if(ranges is None):
        ranges = [None, None];
    if(buffersize is None):
        buffersize = [524288, 524288];
    returnval = download_from_url_to_file(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, "pysftp", outfile, outpath, ranges, buffersize, sleep, timeout);
    return returnval;
def download_file_from_ftp_file(url):
    """Download the file at an ftp:// or ftps:// URL.

    Returns a rewound BytesIO object holding the file contents, or False
    when the URL scheme is not FTP or the server cannot be reached.
    """
    urlparts = urlparse.urlparse(url);
    file_name = os.path.basename(urlparts.path);
    file_dir = os.path.dirname(urlparts.path);
    # Fall back to anonymous credentials when the URL carries none.
    if(urlparts.username is not None):
        ftp_username = urlparts.username;
    else:
        ftp_username = "anonymous";
    if(urlparts.password is not None):
        ftp_password = urlparts.password;
    elif(urlparts.password is None and ftp_username=="anonymous"):
        # Was testing urlparts.username, which is None for anonymous URLs.
        ftp_password = "anonymous";
    else:
        ftp_password = "";
    if(urlparts.scheme=="ftp"):
        ftp = FTP();
    elif(urlparts.scheme=="ftps"):
        ftp = FTP_TLS();
    else:
        # Not an FTP URL (covers http/https and anything else).
        return False;
    ftp_port = urlparts.port;
    if(urlparts.port is None):
        ftp_port = 21;
    try:
        ftp.connect(urlparts.hostname, ftp_port);
    except socket.gaierror:
        # Was "httpurl", an undefined name here; log the actual URL.
        log.info("Error With URL "+url);
        return False;
    except socket.timeout:
        log.info("Error With URL "+url);
        return False;
    # Use the derived credentials (they were computed but unused before).
    ftp.login(ftp_username, ftp_password);
    if(urlparts.scheme=="ftps"):
        ftp.prot_p();
    ftpfile = BytesIO();
    ftp.retrbinary("RETR "+urlparts.path, ftpfile.write);
    ftp.close();
    ftpfile.seek(0, 0);
    return ftpfile;
def download_file_from_ftp_string(url):
    """Download the file at an FTP URL and return its contents as bytes.

    Returns False when the download failed (previously this raised
    AttributeError by calling .read() on the False failure value).
    """
    ftpfile = download_file_from_ftp_file(url);
    if(ftpfile is False):
        return False;
    return ftpfile.read();
def upload_file_to_ftp_file(ftpfile, url):
    """Upload the file-like object ftpfile to an ftp:// or ftps:// URL.

    Returns the rewound ftpfile on success, or False when the URL scheme is
    not FTP or the server cannot be reached.
    """
    urlparts = urlparse.urlparse(url);
    file_name = os.path.basename(urlparts.path);
    file_dir = os.path.dirname(urlparts.path);
    # Fall back to anonymous credentials when the URL carries none.
    if(urlparts.username is not None):
        ftp_username = urlparts.username;
    else:
        ftp_username = "anonymous";
    if(urlparts.password is not None):
        ftp_password = urlparts.password;
    elif(urlparts.password is None and ftp_username=="anonymous"):
        # Was testing urlparts.username, which is None for anonymous URLs.
        ftp_password = "anonymous";
    else:
        ftp_password = "";
    if(urlparts.scheme=="ftp"):
        ftp = FTP();
    elif(urlparts.scheme=="ftps"):
        ftp = FTP_TLS();
    else:
        # Not an FTP URL (covers http/https and anything else).
        return False;
    ftp_port = urlparts.port;
    if(urlparts.port is None):
        ftp_port = 21;
    try:
        ftp.connect(urlparts.hostname, ftp_port);
    except socket.gaierror:
        # Was "httpurl", an undefined name here; log the actual URL.
        log.info("Error With URL "+url);
        return False;
    except socket.timeout:
        log.info("Error With URL "+url);
        return False;
    # Use the derived credentials (they were computed but unused before).
    ftp.login(ftp_username, ftp_password);
    if(urlparts.scheme=="ftps"):
        ftp.prot_p();
    ftp.storbinary("STOR "+urlparts.path, ftpfile);
    ftp.close();
    ftpfile.seek(0, 0);
    return ftpfile;
def upload_file_to_ftp_string(ftpstring, url):
    """Upload the byte string ftpstring to the given FTP URL.

    Wraps the bytes in a BytesIO buffer, delegates the transfer to
    upload_file_to_ftp_file(), closes the buffer, and returns whatever the
    delegate returned (the buffer object on success, False on failure).
    """
    buffer_obj = BytesIO(ftpstring);
    result = upload_file_to_ftp_file(buffer_obj, url);
    buffer_obj.close();
    return result;
if(haveparamiko):
    def download_file_from_sftp_file(url):
        """Download the file at an sftp:// URL via paramiko.

        Returns a rewound BytesIO object holding the file contents, or
        False when the URL is not SFTP or the connection fails.
        """
        urlparts = urlparse.urlparse(url);
        file_name = os.path.basename(urlparts.path);
        file_dir = os.path.dirname(urlparts.path);
        if(urlparts.scheme!="sftp"):
            # Covers http/https and anything else that is not SFTP.
            return False;
        if(urlparts.port is None):
            sftp_port = 22;
        else:
            sftp_port = urlparts.port;
        # Fall back to anonymous credentials when the URL carries none.
        if(urlparts.username is not None):
            sftp_username = urlparts.username;
        else:
            sftp_username = "anonymous";
        if(urlparts.password is not None):
            sftp_password = urlparts.password;
        elif(urlparts.password is None and sftp_username=="anonymous"):
            # Was testing urlparts.username, which is None for anonymous URLs.
            sftp_password = "anonymous";
        else:
            sftp_password = "";
        ssh = paramiko.SSHClient();
        ssh.load_system_host_keys();
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy());
        try:
            # Use the derived credentials (they were computed but unused before).
            ssh.connect(urlparts.hostname, port=sftp_port, username=sftp_username, password=sftp_password);
        except paramiko.ssh_exception.SSHException:
            return False;
        except socket.gaierror:
            # Was "httpurl", an undefined name here; log the actual URL.
            log.info("Error With URL "+url);
            return False;
        except socket.timeout:
            log.info("Error With URL "+url);
            return False;
        sftp = ssh.open_sftp();
        sftpfile = BytesIO();
        sftp.getfo(urlparts.path, sftpfile);
        sftp.close();
        ssh.close();
        sftpfile.seek(0, 0);
        return sftpfile;
else:
    def download_file_from_sftp_file(url):
        """Stub used when paramiko is unavailable; SFTP downloads always fail."""
        return False;
if(haveparamiko):
    def download_file_from_sftp_string(url):
        """Download the file at an sftp:// URL and return its contents as
        bytes, or False when the download failed."""
        sftpfile = download_file_from_sftp_file(url);
        if(sftpfile is False):
            # Propagate failure instead of raising AttributeError on False.read().
            return False;
        return sftpfile.read();
else:
    # The fallback previously defined download_file_from_ftp_string, which
    # clobbered the working FTP helper and left this name undefined.
    def download_file_from_sftp_string(url):
        """Stub used when paramiko is unavailable; SFTP downloads always fail."""
        return False;
if(haveparamiko):
    def upload_file_to_sftp_file(sftpfile, url):
        """Upload the file-like object sftpfile to an sftp:// URL via paramiko.

        Returns the rewound sftpfile on success, or False when the URL is
        not SFTP or the connection fails.
        """
        urlparts = urlparse.urlparse(url);
        file_name = os.path.basename(urlparts.path);
        file_dir = os.path.dirname(urlparts.path);
        if(urlparts.scheme!="sftp"):
            # Covers http/https and anything else that is not SFTP.
            return False;
        if(urlparts.port is None):
            sftp_port = 22;
        else:
            sftp_port = urlparts.port;
        # Fall back to anonymous credentials when the URL carries none.
        if(urlparts.username is not None):
            sftp_username = urlparts.username;
        else:
            sftp_username = "anonymous";
        if(urlparts.password is not None):
            sftp_password = urlparts.password;
        elif(urlparts.password is None and sftp_username=="anonymous"):
            # Was testing urlparts.username, which is None for anonymous URLs.
            sftp_password = "anonymous";
        else:
            sftp_password = "";
        ssh = paramiko.SSHClient();
        ssh.load_system_host_keys();
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy());
        try:
            # Use the derived credentials (they were computed but unused before).
            ssh.connect(urlparts.hostname, port=sftp_port, username=sftp_username, password=sftp_password);
        except paramiko.ssh_exception.SSHException:
            return False;
        except socket.gaierror:
            # Was "httpurl", an undefined name here; log the actual URL.
            log.info("Error With URL "+url);
            return False;
        except socket.timeout:
            log.info("Error With URL "+url);
            return False;
        sftp = ssh.open_sftp();
        sftp.putfo(sftpfile, urlparts.path);
        sftp.close();
        ssh.close();
        sftpfile.seek(0, 0);
        return sftpfile;
else:
    def upload_file_to_sftp_file(sftpfile, url):
        """Stub used when paramiko is unavailable; SFTP uploads always fail."""
        return False;
if(haveparamiko):
    def upload_file_to_sftp_string(sftpstring, url):
        """Upload the byte string sftpstring to an sftp:// URL.

        Returns the (closed) buffer returned by upload_file_to_sftp_file(),
        or False on failure.
        """
        sftpfileo = BytesIO(sftpstring);
        # Was upload_file_to_sftp_files(ftpfileo, ...): both the function
        # name and the variable were typos (NameError at runtime).
        sftpfile = upload_file_to_sftp_file(sftpfileo, url);
        sftpfileo.close();
        return sftpfile;
else:
    # Match the real implementation's two-argument signature.
    def upload_file_to_sftp_string(sftpstring, url):
        """Stub used when paramiko is unavailable; SFTP uploads always fail."""
        return False;
if(havepysftp):
    def download_file_from_pysftp_file(url):
        """Download the file at an sftp:// URL via pysftp.

        Returns a rewound BytesIO object holding the file contents, or
        False when the URL is not SFTP or the connection fails.
        """
        urlparts = urlparse.urlparse(url);
        file_name = os.path.basename(urlparts.path);
        file_dir = os.path.dirname(urlparts.path);
        if(urlparts.scheme!="sftp"):
            # Covers http/https and anything else that is not SFTP.
            return False;
        if(urlparts.port is None):
            sftp_port = 22;
        else:
            sftp_port = urlparts.port;
        # Fall back to anonymous credentials when the URL carries none.
        if(urlparts.username is not None):
            sftp_username = urlparts.username;
        else:
            sftp_username = "anonymous";
        if(urlparts.password is not None):
            sftp_password = urlparts.password;
        elif(urlparts.password is None and sftp_username=="anonymous"):
            sftp_password = "anonymous";
        else:
            sftp_password = "";
        try:
            # The connection was never assigned before, and the code then
            # used an undefined "ssh" object; keep the handle so it works.
            sftp = pysftp.Connection(urlparts.hostname, port=sftp_port, username=sftp_username, password=sftp_password);
        except paramiko.ssh_exception.SSHException:
            return False;
        except socket.gaierror:
            # Was "httpurl", an undefined name here; log the actual URL.
            log.info("Error With URL "+url);
            return False;
        except socket.timeout:
            log.info("Error With URL "+url);
            return False;
        sftpfile = BytesIO();
        sftp.getfo(urlparts.path, sftpfile);
        sftp.close();
        sftpfile.seek(0, 0);
        return sftpfile;
else:
    def download_file_from_pysftp_file(url):
        """Stub used when pysftp is unavailable; SFTP downloads always fail."""
        return False;
if(havepysftp):
    def download_file_from_pysftp_string(url):
        """Download the file at an sftp:// URL via pysftp and return its
        contents as bytes, or False when the download failed."""
        sftpfile = download_file_from_pysftp_file(url);
        if(sftpfile is False):
            # Propagate failure instead of raising AttributeError on False.read().
            return False;
        return sftpfile.read();
else:
    # The fallback previously defined download_file_from_ftp_string, which
    # clobbered the working FTP helper and left this name undefined.
    def download_file_from_pysftp_string(url):
        """Stub used when pysftp is unavailable; SFTP downloads always fail."""
        return False;
if(havepysftp):
    def upload_file_to_pysftp_file(sftpfile, url):
        """Upload the file-like object sftpfile to an sftp:// URL via pysftp.

        Returns the rewound sftpfile on success, or False when the URL is
        not SFTP or the connection fails.
        """
        urlparts = urlparse.urlparse(url);
        file_name = os.path.basename(urlparts.path);
        file_dir = os.path.dirname(urlparts.path);
        if(urlparts.scheme!="sftp"):
            # Covers http/https and anything else that is not SFTP.
            return False;
        if(urlparts.port is None):
            sftp_port = 22;
        else:
            sftp_port = urlparts.port;
        # Fall back to anonymous credentials when the URL carries none.
        if(urlparts.username is not None):
            sftp_username = urlparts.username;
        else:
            sftp_username = "anonymous";
        if(urlparts.password is not None):
            sftp_password = urlparts.password;
        elif(urlparts.password is None and sftp_username=="anonymous"):
            sftp_password = "anonymous";
        else:
            sftp_password = "";
        try:
            # The connection was never assigned before, and the code then
            # used an undefined "ssh" object; keep the handle so it works.
            sftp = pysftp.Connection(urlparts.hostname, port=sftp_port, username=sftp_username, password=sftp_password);
        except paramiko.ssh_exception.SSHException:
            return False;
        except socket.gaierror:
            # Was "httpurl", an undefined name here; log the actual URL.
            log.info("Error With URL "+url);
            return False;
        except socket.timeout:
            log.info("Error With URL "+url);
            return False;
        sftp.putfo(sftpfile, urlparts.path);
        sftp.close();
        sftpfile.seek(0, 0);
        return sftpfile;
else:
    def upload_file_to_pysftp_file(sftpfile, url):
        """Stub used when pysftp is unavailable; SFTP uploads always fail."""
        return False;
if(havepysftp):
    def upload_file_to_pysftp_string(sftpstring, url):
        """Upload the byte string sftpstring to an sftp:// URL via pysftp.

        Returns the (closed) buffer returned by upload_file_to_pysftp_file(),
        or False on failure.
        """
        sftpfileo = BytesIO(sftpstring);
        # Was upload_file_to_pysftp_files(ftpfileo, ...): both the function
        # name and the variable were typos (NameError at runtime).
        sftpfile = upload_file_to_pysftp_file(sftpfileo, url);
        sftpfileo.close();
        return sftpfile;
else:
    # Match the real implementation's two-argument signature.
    def upload_file_to_pysftp_string(sftpstring, url):
        """Stub used when pysftp is unavailable; SFTP uploads always fail."""
        return False;