Add files via upload
[PyWWW-Get.git] / pywwwgetold.py
blob1d7699b3386b34a4c4fab0561b06349ba0d08fea
1 #!/usr/bin/env python
3 '''
4 This program is free software; you can redistribute it and/or modify
5 it under the terms of the Revised BSD License.
7 This program is distributed in the hope that it will be useful,
8 but WITHOUT ANY WARRANTY; without even the implied warranty of
9 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 Revised BSD License for more details.
12 Copyright 2016-2023 Game Maker 2k - https://github.com/GameMaker2k
13 Copyright 2016-2023 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
15 $FileInfo: pywwwgetold.py - Last Update: 9/24/2023 Ver. 1.5.0 RC 1 - Author: cooldude2k $
16 '''
18 from __future__ import division, absolute_import, print_function;
19 import re, os, sys, hashlib, shutil, platform, tempfile, urllib, gzip, time, argparse, cgi, subprocess, socket, email.utils, datetime, time;
20 import logging as log;
21 from ftplib import FTP, FTP_TLS;
22 from base64 import b64encode;
23 haverequests = False;
24 try:
25 import requests;
26 haverequests = True;
27 except ImportError:
28 haverequests = False;
29 havemechanize = False;
30 try:
31 import mechanize;
32 havemechanize = True;
33 except ImportError:
34 havemechanize = False;
35 haveparamiko = False;
36 try:
37 import paramiko;
38 haveparamiko = True;
39 except ImportError:
40 haveparamiko = False;
41 havepysftp = False;
42 try:
43 import pysftp;
44 havepysftp = True;
45 except ImportError:
46 havepysftp = False;
47 haveurllib3 = False;
48 try:
49 import urllib3;
50 haveurllib3 = True;
51 except ImportError:
52 haveurllib3 = False;
53 havehttplib2 = False;
54 try:
55 from httplib2 import HTTPConnectionWithTimeout, HTTPSConnectionWithTimeout;
56 havehttplib2 = True;
57 except ImportError:
58 havehttplib2 = False;
59 havehttpx = False;
60 try:
61 import httpx;
62 havehttpx = True;
63 except ImportError:
64 havehttpx = False;
65 havehttpcore = False;
66 try:
67 import httpcore;
68 havehttpcore = True;
69 except ImportError:
70 havehttpcore = False;
71 havebrotli = False;
72 try:
73 import brotli;
74 havebrotli = True;
75 except ImportError:
76 havebrotli = False;
77 havezstd = False;
78 try:
79 import zstandard;
80 havezstd = True;
81 except ImportError:
82 havezstd = False;
83 if(sys.version[0]=="2"):
84 try:
85 from cStringIO import StringIO;
86 except ImportError:
87 from StringIO import StringIO;
88 # From http://python-future.org/compatible_idioms.html
89 from urlparse import urlparse, urlunparse, urlsplit, urlunsplit, urljoin;
90 from urllib import urlencode;
91 from urllib import urlopen as urlopenalt;
92 from urllib2 import urlopen, Request, install_opener, HTTPError, URLError, build_opener, HTTPCookieProcessor;
93 import urlparse, cookielib;
94 from httplib import HTTPConnection, HTTPSConnection;
95 if(sys.version[0]>="3"):
96 from io import StringIO, BytesIO;
97 # From http://python-future.org/compatible_idioms.html
98 from urllib.parse import urlparse, urlunparse, urlsplit, urlunsplit, urljoin, urlencode;
99 from urllib.request import urlopen, Request, install_opener, build_opener, HTTPCookieProcessor;
100 from urllib.error import HTTPError, URLError;
101 import urllib.parse as urlparse;
102 import http.cookiejar as cookielib;
103 from http.client import HTTPConnection, HTTPSConnection;
# Program identity strings used in User-Agent headers and version reporting.
__program_name__ = "PyWWW-Get";
__program_alt_name__ = "PyWWWGet";
__program_small_name__ = "wwwget";
__project__ = __program_name__;
__project_url__ = "https://github.com/GameMaker2k/PyWWW-Get";
# (major, minor, patch, release-tag, rc-number); the release-tag and
# rc-number slots are None for final releases (see the checks below).
__version_info__ = (1, 5, 0, "RC 1", 1);
# (year, month, day, release-tag, rc-number) of the release date.
__version_date_info__ = (2023, 9, 24, "RC 1", 1);
# "YYYY.MM.DD" with zero-padded month/day.
__version_date__ = str(__version_date_info__[0])+"."+str(__version_date_info__[1]).zfill(2)+"."+str(__version_date_info__[2]).zfill(2);
__revision__ = __version_info__[3];
__revision_id__ = "$Id$";
# NOTE(review): the condition tests __version_info__[4] but the appended
# suffix uses __version_date_info__[4]; both are 1 here, but confirm which
# tuple was intended.
if(__version_info__[4] is not None):
    __version_date_plusrc__ = __version_date__+"-"+str(__version_date_info__[4]);
if(__version_info__[4] is None):
    __version_date_plusrc__ = __version_date__;
# "X.Y.Z" plus the release tag (e.g. "1.5.0 RC 1") when one is set.
if(__version_info__[3] is not None):
    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2])+" "+str(__version_info__[3]);
if(__version_info__[3] is None):
    __version__ = str(__version_info__[0])+"."+str(__version_info__[1])+"."+str(__version_info__[2]);

# Prefix/suffix for temporary download files, e.g. "py3wwwget1-<random>-".
tmpfileprefix = "py"+str(sys.version_info[0])+__program_small_name__+str(__version_info__[0])+"-";
tmpfilesuffix = "-";
pytempdir = tempfile.gettempdir();
# Interpreter bitness ("32" or "64") advertised in SEC-CH-UA-BITNESS.
# platform.architecture() returns a (bits, linkage) tuple, so the original
# compared a tuple against "32bit"/"64bit" and always fell through to "32";
# index [0] to get the bits string.
PyBitness = platform.architecture()[0];
if(PyBitness=="32bit" or PyBitness=="32"):
    PyBitness = "32";
elif(PyBitness=="64bit" or PyBitness=="64"):
    PyBitness = "64";
else:
    PyBitness = "32";  # conservative default when bitness is unknown
# Accept-Encoding value sent with requests: advertise Brotli only when the
# brotli module imported successfully above.
# NOTE(review): havezstd is detected above but "zstd" is never advertised
# here - confirm whether that is intentional.
if(havebrotli):
    compression_supported = "gzip, deflate, br";
else:
    compression_supported = "gzip, deflate";
# Shared cookie jar used as the default for all download functions.
geturls_cj = cookielib.CookieJar();
# Windows platform tokens and their client-hint header fragments. Each
# addon dict previously listed 'SEC-CH-UA-PLATFORM' twice, so the OS name
# "Windows" was silently overwritten by the OS version string; the version
# now lives under the intended 'SEC-CH-UA-PLATFORM-VERSION' key.
windowsNT4_ua_string = "Windows NT 4.0";
windowsNT4_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "32", 'SEC-CH-UA-PLATFORM-VERSION': "4.0.0"};
windows2k_ua_string = "Windows NT 5.0";
windows2k_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "32", 'SEC-CH-UA-PLATFORM-VERSION': "5.0.0"};
windowsXP_ua_string = "Windows NT 5.1";
windowsXP_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "32", 'SEC-CH-UA-PLATFORM-VERSION': "5.1.0"};
windowsXP64_ua_string = "Windows NT 5.2; Win64; x64";
# Version fixed from the copy-pasted "5.1.0" to "5.2.0" to match NT 5.2.
windowsXP64_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "5.2.0"};
windows7_ua_string = "Windows NT 6.1; Win64; x64";
windows7_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "6.1.0"};
windows8_ua_string = "Windows NT 6.2; Win64; x64";
windows8_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "6.2.0"};
windows81_ua_string = "Windows NT 6.3; Win64; x64";
windows81_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "6.3.0"};
windows10_ua_string = "Windows NT 10.0; Win64; x64";
windows10_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "10.0.0"};
windows11_ua_string = "Windows NT 11.0; Win64; x64";
windows11_ua_addon = {'SEC-CH-UA-PLATFORM': "Windows", 'SEC-CH-UA-ARCH': "x86", 'SEC-CH-UA-BITNESS': "64", 'SEC-CH-UA-PLATFORM-VERSION': "11.0.0"};
# NOTE(review): 'SEC-CH-UA-ARCH' stays "x86" even for the 64-bit tokens -
# confirm whether "x86_64" was intended.
# Browser User-Agent strings, all built on the Windows 7 platform token.
geturls_ua_firefox_windows7 = "Mozilla/5.0 ("+windows7_ua_string+"; rv:109.0) Gecko/20100101 Firefox/117.0";
geturls_ua_seamonkey_windows7 = "Mozilla/5.0 ("+windows7_ua_string+"; rv:91.0) Gecko/20100101 Firefox/91.0 SeaMonkey/2.53.17";
geturls_ua_chrome_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36";
geturls_ua_chromium_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chromium/117.0.0.0 Chrome/117.0.0.0 Safari/537.36";
geturls_ua_palemoon_windows7 = "Mozilla/5.0 ("+windows7_ua_string+"; rv:102.0) Gecko/20100101 Goanna/6.3 Firefox/102.0 PaleMoon/32.4.0.1";
geturls_ua_opera_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36 OPR/102.0.0.0";
geturls_ua_vivaldi_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36 Vivaldi/6.2.3105.48";
geturls_ua_internet_explorer_windows7 = "Mozilla/5.0 ("+windows7_ua_string+"; Trident/7.0; rv:11.0) like Gecko";
geturls_ua_microsoft_edge_windows7 = "Mozilla/5.0 ("+windows7_ua_string+") AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Safari/537.36 Edg/117.0.2045.31";
# Self-identifying User-Agent for this tool.
geturls_ua_pywwwget_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prourl})".format(proname=__project__, prover=__version__, prourl=__project_url__);
# Interpreter name used in UA strings; platform.python_implementation()
# can return "" on some platforms, in which case fall back to "Python".
if(platform.python_implementation()!=""):
    py_implementation = platform.python_implementation();
if(platform.python_implementation()==""):
    py_implementation = "Python";
# Alternate self-identifying UA including OS, architecture and interpreter.
geturls_ua_pywwwget_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system()+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__);
# Googlebot UAs, for sites that serve different content to crawlers.
geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)";
geturls_ua_googlebot_google_old = "Googlebot/2.1 (+http://www.google.com/bot.html)";
# Default User-Agent when the caller does not choose one.
geturls_ua = geturls_ua_firefox_windows7;
179 geturls_headers_firefox_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_firefox_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
180 geturls_headers_seamonkey_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_seamonkey_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
181 geturls_headers_chrome_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_chrome_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Google Chrome\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Chromium\";v=\"117\"", 'SEC-CH-UA-FULL-VERSION': "117.0.5938.63"};
182 geturls_headers_chrome_windows7.update(windows7_ua_addon);
183 geturls_headers_chromium_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_chromium_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Chromium\";v=\"117\", \"Not;A=Brand\";v=\"24\"", 'SEC-CH-UA-FULL-VERSION': "117.0.5938.63"};
184 geturls_headers_chromium_windows7.update(windows7_ua_addon);
185 geturls_headers_palemoon_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_palemoon_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
186 geturls_headers_opera_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_opera_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Chromium\";v=\"116\", \"Not;A=Brand\";v=\"8\", \"Opera\";v=\"102\"", 'SEC-CH-UA-FULL-VERSION': "102.0.4880.56"};
187 geturls_headers_opera_windows7.update(windows7_ua_addon);
188 geturls_headers_vivaldi_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_vivaldi_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Google Chrome\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Vivaldi\";v=\"6.2\"", 'SEC-CH-UA-FULL-VERSION': "6.2.3105.48"};
189 geturls_headers_vivaldi_windows7.update(windows7_ua_addon);
190 geturls_headers_internet_explorer_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_internet_explorer_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
191 geturls_headers_microsoft_edge_windows7 = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_microsoft_edge_windows7, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\"Microsoft Edge\";v=\"117\", \"Not;A=Brand\";v=\"8\", \"Chromium\";v=\"117\"", 'SEC-CH-UA-FULL-VERSION': "117.0.2045.31"}
192 geturls_headers_microsoft_edge_windows7.update(windows7_ua_addon);
# Request headers used when identifying as PyWWW-Get itself. The second
# platform hint was keyed 'SEC-CH-UA-PLATFORM' as well, which silently
# overwrote the interpreter-name value; it is now keyed
# 'SEC-CH-UA-PLATFORM-VERSION' so both hints are actually sent.
geturls_headers_pywwwget_python = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_pywwwget_python, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\""+__project__+"\";v=\""+str(__version__)+"\", \"Not;A=Brand\";v=\"8\", \""+py_implementation+"\";v=\""+str(platform.release())+"\"", 'SEC-CH-UA-FULL-VERSION': str(__version__), 'SEC-CH-UA-PLATFORM': ""+py_implementation+"", 'SEC-CH-UA-ARCH': ""+platform.machine()+"", 'SEC-CH-UA-PLATFORM-VERSION': str(__version__), 'SEC-CH-UA-BITNESS': str(PyBitness)};
geturls_headers_pywwwget_python_alt = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_pywwwget_python_alt, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close", 'SEC-CH-UA': "\""+__project__+"\";v=\""+str(__version__)+"\", \"Not;A=Brand\";v=\"8\", \""+py_implementation+"\";v=\""+str(platform.release())+"\"", 'SEC-CH-UA-FULL-VERSION': str(__version__), 'SEC-CH-UA-PLATFORM': ""+py_implementation+"", 'SEC-CH-UA-ARCH': ""+platform.machine()+"", 'SEC-CH-UA-PLATFORM-VERSION': str(__version__), 'SEC-CH-UA-BITNESS': str(PyBitness)};
195 geturls_headers_googlebot_google = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_googlebot_google, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
196 geturls_headers_googlebot_google_old = {'Referer': "http://google.com/", 'User-Agent': geturls_ua_googlebot_google_old, 'Accept-Encoding': "none", 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"};
197 geturls_headers = geturls_headers_firefox_windows7;
198 geturls_download_sleep = 0;
def verbose_printout(dbgtxt, outtype="log", dbgenable=True, dgblevel=20):
    """Emit dbgtxt via print() or the logging module.

    outtype selects the channel: "print", "log" (info), "warning",
    "error", "critical", "exception", "logalt" (logs at level dgblevel)
    or "debug". Returns True when the message was emitted (or when
    dbgenable is False, i.e. output suppressed), False for an unknown
    outtype.
    """
    # Suppressed output is still a success for the caller.
    if(not dbgenable):
        return True;
    # The module imports "logging as log" (top of file); the original body
    # called logging.* and raised NameError on every logging path.
    if(outtype=="print"):
        print(dbgtxt);
        return True;
    elif(outtype=="log"):
        log.info(dbgtxt);
        return True;
    elif(outtype=="warning"):
        log.warning(dbgtxt);
        return True;
    elif(outtype=="error"):
        log.error(dbgtxt);
        return True;
    elif(outtype=="critical"):
        log.critical(dbgtxt);
        return True;
    elif(outtype=="exception"):
        log.exception(dbgtxt);
        return True;
    elif(outtype=="logalt"):
        log.log(dgblevel, dbgtxt);
        return True;
    elif(outtype=="debug"):
        log.debug(dbgtxt);
        return True;
    else:
        return False;
def verbose_printout_return(dbgtxt, outtype="log", dbgenable=True, dgblevel=20):
    """Like verbose_printout(), but return dbgtxt on success instead of
    True (still False when the outtype is unknown)."""
    if verbose_printout(dbgtxt, outtype, dbgenable, dgblevel):
        return dbgtxt;
    return False;
def add_url_param(url, **params):
    """Return url with the keyword arguments merged into its query string.

    Existing parameters with the same name are overwritten; everything
    else (scheme, host, path, fragment) is preserved.
    """
    parts = list(urlparse.urlsplit(url));
    # parts[3] is the query component of the 5-tuple from urlsplit().
    # urlparse.parse_qsl replaces the former cgi.parse_qsl: the cgi module
    # has been deprecated for this use since 2.6 and removed in 3.13.
    query = dict(urlparse.parse_qsl(parts[3]));  # use parse_qs for list values
    query.update(params);
    parts[3] = urlencode(query);
    return urlparse.urlunsplit(parts);
# Make helper executables discoverable by which_exec(): append this
# script's directory and the current working directory to PATH.
os.environ["PATH"] = os.environ["PATH"] + os.pathsep + os.path.dirname(os.path.realpath(__file__)) + os.pathsep + os.getcwd();
def which_exec(execfile):
    """Search the directories in os.environ["PATH"] for execfile.

    Returns the full path of the first match, or None when not found.
    Splits on os.pathsep (the original hard-coded ":", which is wrong on
    Windows) and joins with os.path.join instead of "/".
    """
    for path in os.environ["PATH"].split(os.pathsep):
        candidate = os.path.join(path, execfile);
        if os.path.exists(candidate):
            return candidate;
    return None;
def listize(varlist):
    """Index varlist both ways, starting at 1.

    Returns {1/'reg': {index: item}, 2/'rev': {item: index}} where the
    'reg'/'rev' keys alias the same dicts as 1/2.
    """
    forward = {};
    backward = {};
    for position, item in enumerate(varlist, 1):
        forward[position] = item;
        backward[item] = position;
    return {1: forward, 2: backward, 'reg': forward, 'rev': backward};
def twolistize(varlist):
    """Index a list of (name, desc) pairs both ways, starting at 1.

    Both fields are stripped of surrounding whitespace. Returns
    {1/'name': name-maps, 2/'desc': desc-maps}, each the same
    forward/reverse structure produced by listize().
    """
    namereg = {};
    namerev = {};
    descreg = {};
    descrev = {};
    for position, pair in enumerate(varlist, 1):
        name = pair[0].strip();
        desc = pair[1].strip();
        namereg[position] = name;
        namerev[name] = position;
        descreg[position] = desc;
        descrev[desc] = position;
    nametmp = {1: namereg, 2: namerev, 'reg': namereg, 'rev': namerev};
    desctmp = {1: descreg, 2: descrev, 'reg': descreg, 'rev': descrev};
    return {1: nametmp, 2: desctmp, 'name': nametmp, 'desc': desctmp};
def arglistize(proexec, *varlist):
    """Flatten (flag, value) pairs into an argv-style list.

    Starts with proexec, then appends the first two entries of each pair
    in order, skipping any that are None.
    """
    newarglist = [proexec];
    for argpair in varlist:
        for part in argpair[:2]:
            if part is not None:
                newarglist.append(part);
    return newarglist;
def fix_header_names(header_dict):
    """Return a copy of header_dict with every key Title-Cased
    (e.g. "content-type" -> "Content-Type"); values are unchanged."""
    fixed = {};
    for headername, headervalue in header_dict.items():
        fixed[headername.title()] = headervalue;
    return fixed;
# hms_string by ArcGIS Python Recipes
# https://arcpy.wordpress.com/2012/04/20/146/
def hms_string(sec_elapsed):
    """Format an elapsed-seconds value as "H:MM:SS.ss"."""
    hours = int(sec_elapsed / 3600);
    minutes = int((sec_elapsed % 3600) / 60);
    seconds = sec_elapsed % 60.0;
    return "{}:{:>02}:{:>05.2f}".format(hours, minutes, seconds);
# get_readable_size by Lipis
# http://stackoverflow.com/posts/14998888/revisions
def get_readable_size(bytes, precision=1, unit="IEC"):
    """Convert a byte count into a human-readable size.

    Returns a dict with 'Bytes' (the original count), 'ReadableWithSuffix'
    (e.g. "1.5 KiB"), 'ReadableWithoutSuffix' and 'ReadableSuffix'.
    unit selects "IEC" (1024-based, the default and the fallback for
    unknown values) or "SI" (1000-based) prefixes.
    """
    unit = unit.upper();
    if(unit!="IEC" and unit!="SI"):
        unit = "IEC";
    if(unit=="IEC"):
        units = [" B", " KiB", " MiB", " GiB", " TiB", " PiB", " EiB", " ZiB"];
        unitsize = 1024.0;
    if(unit=="SI"):
        units = [" B", " kB", " MB", " GB", " TB", " PB", " EB", " ZB"];
        unitsize = 1000.0;
    orgbytes = bytes;
    size = bytes;  # work on a copy; keep the parameter for the result dict
    for suffix in units:  # loop var was "unit", clobbering the parameter
        if abs(size) < unitsize:
            strformat = "%3."+str(precision)+"f%s";
            pre_return_val = (strformat % (size, suffix));
            # Trim trailing zero decimals: "1.0 KiB" -> "1 KiB".
            pre_return_val = re.sub(r"([0]+) ([A-Za-z]+)", r" \2", pre_return_val);
            pre_return_val = re.sub(r"\. ([A-Za-z]+)", r" \1", pre_return_val);
            alt_return_val = pre_return_val.split();
            return {'Bytes': orgbytes, 'ReadableWithSuffix': pre_return_val, 'ReadableWithoutSuffix': alt_return_val[0], 'ReadableSuffix': alt_return_val[1]};
        size /= unitsize;
    # Fallback for values beyond the largest listed prefix. The suffix
    # needs its leading space so the trim/split logic above still applies;
    # the original's "YiB" yielded a single token and an IndexError.
    strformat = "%."+str(precision)+"f%s";
    pre_return_val = (strformat % (size, " YiB"));
    pre_return_val = re.sub(r"([0]+) ([A-Za-z]+)", r" \2", pre_return_val);
    pre_return_val = re.sub(r"\. ([A-Za-z]+)", r" \1", pre_return_val);
    alt_return_val = pre_return_val.split();
    return {'Bytes': orgbytes, 'ReadableWithSuffix': pre_return_val, 'ReadableWithoutSuffix': alt_return_val[0], 'ReadableSuffix': alt_return_val[1]};
def get_readable_size_from_file(infile, precision=1, unit="IEC", usehashes=False, usehashtypes="md5,sha1"):
    """Return get_readable_size() info for the file at infile.

    When usehashes is True, read the file once and add one
    uppercase-keyed hex digest entry per algorithm named in the
    comma-separated usehashtypes string (e.g. return_val['MD5']).
    """
    unit = unit.upper();
    usehashtypes = usehashtypes.lower();
    getfilesize = os.path.getsize(infile);
    return_val = get_readable_size(getfilesize, precision, unit);
    if(usehashes):
        # with-statement closes the handle even if read() raises (the
        # original leaked the handle on error).
        with open(infile, "rb") as openfile:
            filecontents = openfile.read();
        for hashtypename in usehashtypes.split(","):
            hashtypeup = hashtypename.strip().upper();
            filehash = hashlib.new(hashtypeup);
            filehash.update(filecontents);
            return_val.update({hashtypeup: filehash.hexdigest()});
    return return_val;
def get_readable_size_from_string(instring, precision=1, unit="IEC", usehashes=False, usehashtypes="md5,sha1"):
    """Return get_readable_size() info for len(instring).

    When usehashes is True, add one uppercase-keyed hex digest per
    algorithm in the comma-separated usehashtypes string. Accepts str
    (encoded as UTF-8 before hashing) or bytes (hashed as-is; the
    original raised AttributeError on bytes input under Python 3).
    """
    unit = unit.upper();
    usehashtypes = usehashtypes.lower();
    getfilesize = len(instring);
    return_val = get_readable_size(getfilesize, precision, unit);
    if(usehashes):
        # Type check replaces the fragile sys.version[0] string sniffing;
        # on Python 2, str is bytes, so plain strings hash unchanged.
        if(isinstance(instring, bytes)):
            hashdata = instring;
        else:
            hashdata = instring.encode('utf-8');
        for hashtypename in usehashtypes.split(","):
            hashtypeup = hashtypename.strip().upper();
            filehash = hashlib.new(hashtypeup);
            filehash.update(hashdata);
            filegethash = filehash.hexdigest();
            return_val.update({hashtypeup: filegethash});
    return return_val;
def make_http_headers_from_dict_to_list(headers={'Referer': "http://google.com/", 'User-Agent': geturls_ua, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"}):
    """Convert a header dict to a list of (name, value) tuples.

    Lists pass through unchanged; anything else returns False.
    """
    if isinstance(headers, dict):
        returnval = [];
        if(sys.version[0]=="2"):
            returnval = list(headers.iteritems());
        if(sys.version[0]>="3"):
            returnval = list(headers.items());
    elif isinstance(headers, list):
        returnval = headers;
    else:
        returnval = False;
    return returnval;
def make_http_headers_from_dict_to_pycurl(headers={'Referer': "http://google.com/", 'User-Agent': geturls_ua, 'Accept-Encoding': compression_supported, 'Accept-Language': "en-US,en;q=0.8,en-CA,en-GB;q=0.6", 'Accept-Charset': "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7", 'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", 'Connection': "close"}):
    """Convert a header dict to a list of "Name: value" strings as used
    by pycurl. Lists pass through unchanged; anything else returns False."""
    if isinstance(headers, dict):
        returnval = [];
        if(sys.version[0]=="2"):
            returnval = [headkey+": "+headvalue for headkey, headvalue in headers.iteritems()];
        if(sys.version[0]>="3"):
            returnval = [headkey+": "+headvalue for headkey, headvalue in headers.items()];
    elif isinstance(headers, list):
        returnval = headers;
    else:
        returnval = False;
    return returnval;
def make_http_headers_from_list_to_dict(headers=[("Referer", "http://google.com/"), ("User-Agent", geturls_ua), ("Accept-Encoding", compression_supported), ("Accept-Language", "en-US,en;q=0.8,en-CA,en-GB;q=0.6"), ("Accept-Charset", "ISO-8859-1,ISO-8859-15,utf-8;q=0.7,*;q=0.7"), ("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"), ("Connection", "close")]):
    """Convert a list of (name, value) header pairs to a dict.

    Dicts pass through unchanged; anything else returns False.
    """
    if isinstance(headers, list):
        returnval = {};
        for headerpair in headers:
            returnval.update({headerpair[0]: headerpair[1]});
    elif isinstance(headers, dict):
        returnval = headers;
    else:
        returnval = False;
    return returnval;
def get_httplib_support(checkvalue=None):
    """List the usable URL-fetch backends, or test one for availability.

    With checkvalue=None, returns the list of backend names whose modules
    imported successfully. With a backend name, returns True/False for
    whether it is supported; "urllib1"/"urllib2" are accepted as aliases
    for "urllib" and "httplib1" for "httplib".
    """
    global haverequests, havemechanize, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp;
    supported = ["ftp", "httplib"];
    if(havehttplib2):
        supported.append("httplib2");
    supported.append("urllib");
    if(haveurllib3):
        supported.append("urllib3");
        supported.append("request3");
    supported.append("request");
    if(haverequests):
        supported.append("requests");
    if(havehttpx):
        supported.append("httpx");
        supported.append("httpx2");
    if(havemechanize):
        supported.append("mechanize");
    if(haveparamiko):
        supported.append("sftp");
    if(havepysftp):
        supported.append("pysftp");
    if(checkvalue is not None):
        if(checkvalue=="urllib1" or checkvalue=="urllib2"):
            checkvalue = "urllib";
        if(checkvalue=="httplib1"):
            checkvalue = "httplib";
        return checkvalue in supported;
    return supported;
def check_httplib_support(checkvalue="urllib"):
    """Return True/False for whether the named backend is available.
    Accepts the same aliases as get_httplib_support()."""
    aliases = {"urllib1": "urllib", "urllib2": "urllib", "httplib1": "httplib"};
    normalized = aliases.get(checkvalue, checkvalue);
    return get_httplib_support(normalized);
def get_httplib_support_list():
    """Return the full list of available backend names."""
    return get_httplib_support(None);
def download_from_url(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", sleep=-1):
    """Download httpurl using the backend named by httplibuse and return
    that backend's result (False for an unrecognized backend name).

    sleep < 0 means "use the module default" (geturls_download_sleep).
    Backends whose optional module failed to import at load time fall
    back to "urllib" (HTTP libraries) or "ftp" (the SSH-based sftp and
    pysftp backends).
    """
    global geturls_download_sleep, haverequests, havemechanize, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp;
    if(sleep<0):
        sleep = geturls_download_sleep;
    # Normalize legacy backend aliases.
    if(httplibuse=="urllib1" or httplibuse=="urllib2"):
        httplibuse = "urllib";
    if(httplibuse=="httplib1"):
        httplibuse = "httplib";
    # Degrade gracefully when the requested backend is not installed.
    if(not haverequests and httplibuse=="requests"):
        httplibuse = "urllib";
    if(not havehttpx and httplibuse=="httpx"):
        httplibuse = "urllib";
    if(not havehttpx and httplibuse=="httpx2"):
        httplibuse = "urllib";
    if(not havehttpcore and httplibuse=="httpcore"):
        httplibuse = "urllib";
    if(not havehttpcore and httplibuse=="httpcore2"):
        httplibuse = "urllib";
    if(not havemechanize and httplibuse=="mechanize"):
        httplibuse = "urllib";
    if(not havehttplib2 and httplibuse=="httplib2"):
        httplibuse = "httplib";
    if(not haveparamiko and httplibuse=="sftp"):
        httplibuse = "ftp";
    if(not havepysftp and httplibuse=="pysftp"):
        httplibuse = "ftp";
    # Dispatch to the matching download_from_url_with_* implementation.
    if(httplibuse=="urllib"):
        returnval = download_from_url_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
    elif(httplibuse=="request"):
        returnval = download_from_url_with_request(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
    elif(httplibuse=="request3"):
        returnval = download_from_url_with_request3(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
    elif(httplibuse=="httplib"):
        returnval = download_from_url_with_httplib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
    elif(httplibuse=="httplib2"):
        returnval = download_from_url_with_httplib2(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
    elif(httplibuse=="urllib3"):
        returnval = download_from_url_with_urllib3(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
    elif(httplibuse=="requests"):
        returnval = download_from_url_with_requests(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
    elif(httplibuse=="httpx"):
        returnval = download_from_url_with_httpx(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
    elif(httplibuse=="httpx2"):
        returnval = download_from_url_with_httpx2(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
    elif(httplibuse=="httpcore"):
        returnval = download_from_url_with_httpcore(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
    elif(httplibuse=="httpcore2"):
        returnval = download_from_url_with_httpcore2(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
    elif(httplibuse=="mechanize"):
        returnval = download_from_url_with_mechanize(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
    elif(httplibuse=="ftp"):
        returnval = download_from_url_with_ftp(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
    elif(httplibuse=="sftp"):
        returnval = download_from_url_with_sftp(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
    elif(httplibuse=="pysftp"):
        returnval = download_from_url_with_pysftp(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
    else:
        returnval = False;
    return returnval;
def download_from_url_file(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", buffersize=524288, sleep=-1):
    """Download httpurl to a temporary file using the backend named by httplibuse.

    Normalizes legacy backend aliases, falls back to "urllib" (HTTP) or "ftp"
    (file transfer) when the optional module backing the requested backend is
    not installed, then dispatches to the matching
    download_from_url_file_with_* implementation.

    Returns whatever the backend returns (a result dict, or False on error);
    returns False for an unknown backend name.
    """
    global geturls_download_sleep, haverequests, havemechanize, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp;
    if(sleep<0):
        sleep = geturls_download_sleep;
    # Normalize legacy backend aliases.
    if(httplibuse=="urllib1" or httplibuse=="urllib2"):
        httplibuse = "urllib";
    if(httplibuse=="httplib1"):
        httplibuse = "httplib";
    # Fall back when the module that backs the requested backend is missing.
    if(not haverequests and httplibuse=="requests"):
        httplibuse = "urllib";
    if(not havehttpx and (httplibuse=="httpx" or httplibuse=="httpx2")):
        httplibuse = "urllib";
    if(not havehttpcore and (httplibuse=="httpcore" or httplibuse=="httpcore2")):
        httplibuse = "urllib";
    if(not havemechanize and httplibuse=="mechanize"):
        httplibuse = "urllib";
    if(not havehttplib2 and httplibuse=="httplib2"):
        httplibuse = "httplib";
    if(not haveparamiko and httplibuse=="sftp"):
        httplibuse = "ftp";
    # BUGFIX: pysftp availability was previously checked with haveparamiko,
    # so pysftp could be selected even when the pysftp module is absent.
    if(not havepysftp and httplibuse=="pysftp"):
        httplibuse = "ftp";
    # Every backend shares the same call signature, so dispatch via a table
    # instead of a long if/elif chain.
    httplib_dispatch = {
        "urllib": download_from_url_file_with_urllib,
        "request": download_from_url_file_with_request,
        "request3": download_from_url_file_with_request3,
        "httplib": download_from_url_file_with_httplib,
        "httplib2": download_from_url_file_with_httplib2,
        "urllib3": download_from_url_file_with_urllib3,
        "requests": download_from_url_file_with_requests,
        "httpx": download_from_url_file_with_httpx,
        "httpx2": download_from_url_file_with_httpx2,
        "httpcore": download_from_url_file_with_httpcore,
        "httpcore2": download_from_url_file_with_httpcore2,
        "mechanize": download_from_url_file_with_mechanize,
        "ftp": download_from_url_file_with_ftp,
        "sftp": download_from_url_file_with_sftp,
        "pysftp": download_from_url_file_with_pysftp,
    };
    downloader = httplib_dispatch.get(httplibuse);
    if(downloader is None):
        return False;
    return downloader(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep);
def download_from_url_to_file(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, httplibuse="urllib", outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
    """Download httpurl to outpath/outfile (or to memory when outfile is "-").

    Normalizes legacy backend aliases, falls back to "urllib" or "ftp" when
    the module backing the requested backend is missing, then dispatches to
    the matching download_from_url_to_file_with_* implementation.

    Returns whatever the backend returns (a result dict, or False on error);
    returns False for an unknown backend name.
    """
    # BUGFIX: the global list previously named "havehttpcorei" (typo).
    global geturls_download_sleep, haverequests, havemechanize, havehttplib2, haveurllib3, havehttpx, havehttpcore, haveparamiko, havepysftp;
    if(sleep<0):
        sleep = geturls_download_sleep;
    # Normalize legacy backend aliases.
    if(httplibuse=="urllib1" or httplibuse=="urllib2"):
        httplibuse = "urllib";
    if(httplibuse=="httplib1"):
        httplibuse = "httplib";
    # Fall back when the module that backs the requested backend is missing.
    if(not haverequests and httplibuse=="requests"):
        httplibuse = "urllib";
    if(not havehttpx and (httplibuse=="httpx" or httplibuse=="httpx2")):
        httplibuse = "urllib";
    if(not havehttpcore and (httplibuse=="httpcore" or httplibuse=="httpcore2")):
        httplibuse = "urllib";
    if(not havemechanize and httplibuse=="mechanize"):
        httplibuse = "urllib";
    if(not havehttplib2 and httplibuse=="httplib2"):
        httplibuse = "httplib";
    if(not haveparamiko and httplibuse=="sftp"):
        httplibuse = "ftp";
    if(not havepysftp and httplibuse=="pysftp"):
        httplibuse = "ftp";
    # Every backend shares the same call signature, so dispatch via a table.
    # BUGFIX: the httpx/httpx2/httpcore/httpcore2 branches previously omitted
    # outfile/outpath, shifting buffersize and sleep into those positions.
    httplib_dispatch = {
        "urllib": download_from_url_to_file_with_urllib,
        "request": download_from_url_to_file_with_request,
        "request3": download_from_url_to_file_with_request3,
        "httplib": download_from_url_to_file_with_httplib,
        "httplib2": download_from_url_to_file_with_httplib2,
        "urllib3": download_from_url_to_file_with_urllib3,
        "requests": download_from_url_to_file_with_requests,
        "httpx": download_from_url_to_file_with_httpx,
        "httpx2": download_from_url_to_file_with_httpx2,
        "httpcore": download_from_url_to_file_with_httpcore,
        "httpcore2": download_from_url_to_file_with_httpcore2,
        "mechanize": download_from_url_to_file_with_mechanize,
        "ftp": download_from_url_to_file_with_ftp,
        "sftp": download_from_url_to_file_with_sftp,
        "pysftp": download_from_url_to_file_with_pysftp,
    };
    downloader = httplib_dispatch.get(httplibuse);
    if(downloader is None):
        return False;
    return downloader(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, outfile, outpath, buffersize, sleep);
def download_from_url_with_urllib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
    """Fetch httpurl with urllib and return the whole response in memory.

    Builds a cookie-aware opener, applies optional User-Agent / Referer
    overrides and HTTP Basic auth taken from the URL's userinfo, then issues
    a GET (any method other than POST is sent as GET).

    Returns a dict with keys Type, Content, Headers, Version, Method,
    HeadersSent, URL and Code, or False on URLError / socket timeout.
    """
    global geturls_download_sleep, havebrotli;
    if(sleep<0):
        sleep = geturls_download_sleep;
    urlparts = urlparse.urlparse(httpurl);
    if(isinstance(httpheaders, list)):
        httpheaders = make_http_headers_from_list_to_dict(httpheaders);
    httpheaders = fix_header_names(httpheaders);
    # BUGFIX: the User-Agent/Referer values were previously added to the
    # httpuseragent string (AttributeError) instead of the header dict.
    if(httpuseragent is not None):
        httpheaders.update({'User-Agent': httpuseragent});
    if(httpreferer is not None):
        httpheaders.update({'Referer': httpreferer});
    if(urlparts.username is not None or urlparts.password is not None):
        # Empty-string fallbacks guard against a URL carrying only one of the
        # two userinfo parts (previously a TypeError on None concatenation).
        inurlencode = b64encode(str((urlparts.username or "")+":"+(urlparts.password or "")).encode()).decode("UTF-8");
        httpheaders.update({'Authorization': "Basic "+inurlencode});
    geturls_opener = build_opener(HTTPCookieProcessor(httpcookie));
    if(isinstance(httpheaders, dict)):
        httpheaders = make_http_headers_from_dict_to_list(httpheaders);
    geturls_opener.addheaders = httpheaders;
    time.sleep(sleep);
    if(postdata is not None and not isinstance(postdata, dict)):
        postdata = urlencode(postdata);
    try:
        if(httpmethod=="POST"):
            geturls_text = geturls_opener.open(httpurl, data=postdata);
        else:
            # Any method other than POST is issued as a plain GET.
            geturls_text = geturls_opener.open(httpurl);
    except HTTPError as geturls_text_error:
        # An HTTP error response still carries usable headers and a body.
        geturls_text = geturls_text_error;
        log.info("Error With URL "+httpurl);
    except URLError:
        log.info("Error With URL "+httpurl);
        return False;
    except socket.timeout:
        log.info("Error With URL "+httpurl);
        return False;
    httpcodeout = geturls_text.getcode();
    httpversionout = "1.1";
    httpmethodout = httpmethod;
    httpurlout = geturls_text.geturl();
    httpheaderout = geturls_text.info();
    httpheadersentout = httpheaders;
    if(isinstance(httpheaderout, list)):
        httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
    httpheaderout = fix_header_names(httpheaderout);
    if(sys.version[0]=="2"):
        # Python 2's mimetools.Message is not a real dict; copy it into one.
        try:
            httpheaderout = {hkey: httpheaderout[hkey] for hkey in httpheaderout.keys()};
        except AttributeError:
            pass;
    if(isinstance(httpheadersentout, list)):
        httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
    httpheadersentout = fix_header_names(httpheadersentout);
    log.info("Downloading URL "+httpurl);
    httpencoding = httpheaderout.get("Content-Encoding");
    if(httpencoding=="gzip" or httpencoding=="deflate"):
        # NOTE(review): GzipFile only handles gzip streams; "deflate" bodies
        # presumably never occur in practice — confirm before relying on it.
        if(sys.version[0]=="2"):
            strbuf = StringIO(geturls_text.read());
        else:
            strbuf = BytesIO(geturls_text.read());
        gzstrbuf = gzip.GzipFile(fileobj=strbuf);
        returnval_content = gzstrbuf.read()[:];
    elif(httpencoding=="br" and havebrotli):
        returnval_content = brotli.decompress(geturls_text.read()[:]);
    else:
        # Identity encoding, or "br" without the brotli module (previously
        # left returnval_content undefined and raised NameError).
        returnval_content = geturls_text.read()[:];
    returnval = {'Type': "Content", 'Content': returnval_content, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
    geturls_text.close();
    return returnval;
def download_from_url_file_with_urllib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
    """Stream httpurl into a uniquely-named temporary file with urllib.

    The temporary file name is salted with a SHA-1 of the URL, buffer size
    and start time.  Progress is logged per buffersize-sized chunk.

    Returns a dict with keys Type ("File"), Filename, Filesize, FilesizeAlt,
    Headers, Version, Method, HeadersSent, URL, Code, DownloadTime and
    DownloadTimeReadable, or False on URLError / socket timeout.
    """
    global geturls_download_sleep, tmpfileprefix, tmpfilesuffix;
    exec_time_start = time.time();
    myhash = hashlib.new("sha1");
    if(sys.version[0]=="2"):
        myhash.update(httpurl);
        myhash.update(str(buffersize));
        myhash.update(str(exec_time_start));
    else:
        myhash.update(httpurl.encode('utf-8'));
        myhash.update(str(buffersize).encode('utf-8'));
        myhash.update(str(exec_time_start).encode('utf-8'));
    newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest());
    if(sleep<0):
        sleep = geturls_download_sleep;
    urlparts = urlparse.urlparse(httpurl);
    if(isinstance(httpheaders, list)):
        httpheaders = make_http_headers_from_list_to_dict(httpheaders);
    httpheaders = fix_header_names(httpheaders);
    # BUGFIX: the User-Agent/Referer values were previously added to the
    # httpuseragent string (AttributeError) instead of the header dict.
    if(httpuseragent is not None):
        httpheaders.update({'User-Agent': httpuseragent});
    if(httpreferer is not None):
        httpheaders.update({'Referer': httpreferer});
    if(urlparts.username is not None or urlparts.password is not None):
        # Empty-string fallbacks guard against a URL with only one userinfo part.
        inurlencode = b64encode(str((urlparts.username or "")+":"+(urlparts.password or "")).encode()).decode("UTF-8");
        httpheaders.update({'Authorization': "Basic "+inurlencode});
    geturls_opener = build_opener(HTTPCookieProcessor(httpcookie));
    if(isinstance(httpheaders, dict)):
        httpheaders = make_http_headers_from_dict_to_list(httpheaders);
    geturls_opener.addheaders = httpheaders;
    time.sleep(sleep);
    try:
        if(httpmethod=="POST"):
            geturls_text = geturls_opener.open(httpurl, data=postdata);
        else:
            # Any method other than POST is issued as a plain GET.
            geturls_text = geturls_opener.open(httpurl);
    except HTTPError as geturls_text_error:
        # An HTTP error response still carries usable headers and a body.
        geturls_text = geturls_text_error;
        log.info("Error With URL "+httpurl);
    except URLError:
        log.info("Error With URL "+httpurl);
        return False;
    # BUGFIX: a second, unreachable "except socket.timeout" clause removed.
    except socket.timeout:
        log.info("Error With URL "+httpurl);
        return False;
    httpcodeout = geturls_text.getcode();
    httpversionout = "1.1";
    httpmethodout = httpmethod;
    httpurlout = geturls_text.geturl();
    httpheaderout = geturls_text.info();
    httpheadersentout = httpheaders;
    if(isinstance(httpheaderout, list)):
        httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
    if(sys.version[0]=="2"):
        # Python 2's mimetools.Message is not a real dict; copy it into one.
        try:
            httpheaderout = {hkey: httpheaderout[hkey] for hkey in httpheaderout.keys()};
        except AttributeError:
            pass;
    httpheaderout = fix_header_names(httpheaderout);
    if(isinstance(httpheadersentout, list)):
        httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
    httpheadersentout = fix_header_names(httpheadersentout);
    downloadsize = httpheaderout.get('Content-Length');
    if(downloadsize is not None):
        downloadsize = int(downloadsize);
    if(downloadsize is None):
        downloadsize = 0;
    fulldatasize = 0;
    prevdownsize = 0;
    log.info("Downloading URL "+httpurl);
    with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
        tmpfilename = f.name;
        # Mirror the server's Last-Modified time onto the temp file.
        # BUGFIX: guarded against a missing header, which previously raised
        # an uncaught TypeError from parsedate_to_datetime(None).
        lastmodified = httpheaderout.get('Last-Modified');
        if(lastmodified is not None):
            try:
                lastmodtime = time.mktime(email.utils.parsedate_to_datetime(lastmodified).timetuple());
                os.utime(tmpfilename, (lastmodtime, lastmodtime));
            except (AttributeError, TypeError):
                try:
                    lastmodtime = time.mktime(datetime.datetime.strptime(lastmodified, "%a, %d %b %Y %H:%M:%S %Z").timetuple());
                    os.utime(tmpfilename, (lastmodtime, lastmodtime));
                except ValueError:
                    pass;
            except ValueError:
                pass;
        returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
        while True:
            databytes = geturls_text.read(buffersize);
            if(not databytes):
                break;
            datasize = len(databytes);
            fulldatasize = datasize + fulldatasize;
            percentage = "";
            if(downloadsize>0):
                percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
            downloaddiff = fulldatasize - prevdownsize;
            log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
            prevdownsize = fulldatasize;
            f.write(databytes);
    geturls_text.close();
    exec_time_end = time.time();
    # BUGFIX: elapsed time was previously computed as start - end (negative).
    log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to download file.");
    returnval.update({'Filesize': os.path.getsize(tmpfilename), 'DownloadTime': float(exec_time_end - exec_time_start), 'DownloadTimeReadable': hms_string(exec_time_end - exec_time_start)});
    return returnval;
def download_from_url_to_file_with_urllib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
    """Download httpurl via urllib to outpath/outfile, or to memory for "-".

    When outfile is a name, the payload is first fetched to a temporary file
    (buffersize[0] chunks) and then moved into place; the server's
    Last-Modified time is applied to the final file when parseable.  When
    outfile is "-", the temporary file is copied into an in-memory buffer in
    buffersize[1] chunks and returned as Content.

    Returns a result dict (Type "File" or "Content"), or False on failure.
    NOTE: buffersize is a mutable default but is only ever read here.
    """
    global geturls_download_sleep;
    if(sleep<0):
        sleep = geturls_download_sleep;
    if(not outfile=="-"):
        outpath = outpath.rstrip(os.path.sep);
        filepath = os.path.realpath(outpath+os.path.sep+outfile);
        if(not os.path.exists(outpath)):
            os.makedirs(outpath);
        if(os.path.exists(outpath) and os.path.isfile(outpath)):
            return False;
        if(os.path.exists(filepath) and os.path.isdir(filepath)):
            return False;
        pretmpfilename = download_from_url_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
        if(not pretmpfilename):
            return False;
        tmpfilename = pretmpfilename['Filename'];
        downloadsize = os.path.getsize(tmpfilename);
        log.info("Moving file "+tmpfilename+" to "+filepath);
        exec_time_start = time.time();
        shutil.move(tmpfilename, filepath);
        # Re-apply the server's Last-Modified time (lost by the move).
        # BUGFIX: guarded against a missing header, which previously raised
        # an uncaught TypeError from parsedate_to_datetime(None).
        lastmodified = pretmpfilename.get('Headers').get('Last-Modified');
        if(lastmodified is not None):
            try:
                lastmodtime = time.mktime(email.utils.parsedate_to_datetime(lastmodified).timetuple());
                os.utime(filepath, (lastmodtime, lastmodtime));
            except (AttributeError, TypeError):
                try:
                    lastmodtime = time.mktime(datetime.datetime.strptime(lastmodified, "%a, %d %b %Y %H:%M:%S %Z").timetuple());
                    os.utime(filepath, (lastmodtime, lastmodtime));
                except ValueError:
                    pass;
            except ValueError:
                pass;
        exec_time_end = time.time();
        # BUGFIX: elapsed time was previously computed as start - end (negative).
        log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to move file.");
        if(os.path.exists(tmpfilename)):
            os.remove(tmpfilename);
        # BUGFIX: the result dict previously listed the 'Method' key twice.
        returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
    if(outfile=="-"):
        pretmpfilename = download_from_url_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
        # BUGFIX: the Python 3 path previously skipped this failure check.
        if(not pretmpfilename):
            return False;
        tmpfilename = pretmpfilename['Filename'];
        downloadsize = os.path.getsize(tmpfilename);
        fulldatasize = 0;
        prevdownsize = 0;
        exec_time_start = time.time();
        with open(tmpfilename, 'rb') as ft:
            # StringIO holds byte strings on Python 2; BytesIO on Python 3.
            f = StringIO() if(sys.version[0]=="2") else BytesIO();
            while True:
                databytes = ft.read(buffersize[1]);
                if(not databytes):
                    break;
                datasize = len(databytes);
                fulldatasize = datasize + fulldatasize;
                percentage = "";
                if(downloadsize>0):
                    percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                downloaddiff = fulldatasize - prevdownsize;
                log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                prevdownsize = fulldatasize;
                f.write(databytes);
            f.seek(0);
            fdata = f.getvalue();
            f.close();
        os.remove(tmpfilename);
        exec_time_end = time.time();
        log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to copy file.");
        returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
    return returnval;
def download_from_url_with_httplib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
    """Fetch httpurl with http.client (httplib) and return the response.

    Opens an HTTP(S)Connection for the URL's host, applies optional
    User-Agent / Referer overrides and HTTP Basic auth from the URL's
    userinfo, and issues the request (any method other than POST is sent
    as GET).  NOTE: this backend does not send cookies from httpcookie;
    the parameter is accepted only for signature compatibility.

    Returns a dict with keys Type, Content, Headers, Version, Method,
    HeadersSent, URL and Code; False on non-HTTP(S) URLs or socket errors.
    """
    global geturls_download_sleep, havebrotli;
    if(sleep<0):
        sleep = geturls_download_sleep;
    urlparts = urlparse.urlparse(httpurl);
    if(isinstance(httpheaders, list)):
        httpheaders = make_http_headers_from_list_to_dict(httpheaders);
    httpheaders = fix_header_names(httpheaders);
    # BUGFIX: the User-Agent/Referer values were previously added to the
    # httpuseragent string (AttributeError) instead of the header dict.
    if(httpuseragent is not None):
        httpheaders.update({'User-Agent': httpuseragent});
    if(httpreferer is not None):
        httpheaders.update({'Referer': httpreferer});
    if(urlparts.username is not None or urlparts.password is not None):
        # Empty-string fallbacks guard against a URL with only one userinfo part.
        inurlencode = b64encode(str((urlparts.username or "")+":"+(urlparts.password or "")).encode()).decode("UTF-8");
        httpheaders.update({'Authorization': "Basic "+inurlencode});
    # (An unused urllib opener was previously built here; it never carried
    # the request, so it has been removed.)
    time.sleep(sleep);
    if(urlparts[0]=="http"):
        httpconn = HTTPConnection(urlparts[1]);
    elif(urlparts[0]=="https"):
        httpconn = HTTPSConnection(urlparts[1]);
    else:
        return False;
    if(postdata is not None and not isinstance(postdata, dict)):
        postdata = urlencode(postdata);
    try:
        if(httpmethod=="POST"):
            # BUGFIX: POST requests were previously issued with method "GET".
            httpconn.request("POST", urlparts[2], body=postdata, headers=httpheaders);
        else:
            httpconn.request("GET", urlparts[2], headers=httpheaders);
    except socket.timeout:
        log.info("Error With URL "+httpurl);
        return False;
    except socket.gaierror:
        log.info("Error With URL "+httpurl);
        return False;
    geturls_text = httpconn.getresponse();
    httpcodeout = geturls_text.status;
    httpversionout = "1.1";
    httpmethodout = httpmethod;
    httpurlout = httpurl;
    httpheaderout = geturls_text.getheaders();
    httpheadersentout = httpheaders;
    if(isinstance(httpheaderout, list)):
        httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
    if(sys.version[0]=="2"):
        # Python 2's header object is not a real dict; copy it into one.
        try:
            httpheaderout = {hkey: httpheaderout[hkey] for hkey in httpheaderout.keys()};
        except AttributeError:
            pass;
    httpheaderout = fix_header_names(httpheaderout);
    if(isinstance(httpheadersentout, list)):
        httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
    httpheadersentout = fix_header_names(httpheadersentout);
    log.info("Downloading URL "+httpurl);
    httpencoding = httpheaderout.get("Content-Encoding");
    if(httpencoding=="gzip" or httpencoding=="deflate"):
        # NOTE(review): GzipFile only handles gzip streams; "deflate" bodies
        # presumably never occur in practice — confirm before relying on it.
        if(sys.version[0]=="2"):
            strbuf = StringIO(geturls_text.read());
        else:
            strbuf = BytesIO(geturls_text.read());
        gzstrbuf = gzip.GzipFile(fileobj=strbuf);
        returnval_content = gzstrbuf.read()[:];
    elif(httpencoding=="br" and havebrotli):
        returnval_content = brotli.decompress(geturls_text.read()[:]);
    else:
        # Identity encoding, or "br" without the brotli module (previously
        # left returnval_content undefined and raised NameError).
        returnval_content = geturls_text.read()[:];
    returnval = {'Type': "Content", 'Content': returnval_content, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
    geturls_text.close();
    return returnval;
def download_from_url_file_with_httplib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
    """Stream httpurl into a uniquely-named temporary file with http.client.

    The temporary file name is salted with a SHA-1 of the URL, buffer size
    and start time.  Progress is logged per buffersize-sized chunk.
    NOTE: this backend does not send cookies from httpcookie; the parameter
    is accepted only for signature compatibility.

    Returns a dict with keys Type ("File"), Filename, Filesize, FilesizeAlt,
    Headers, Version, Method, HeadersSent, URL, Code, DownloadTime and
    DownloadTimeReadable; False on non-HTTP(S) URLs or socket errors.
    """
    global geturls_download_sleep, tmpfileprefix, tmpfilesuffix;
    exec_time_start = time.time();
    myhash = hashlib.new("sha1");
    if(sys.version[0]=="2"):
        myhash.update(httpurl);
        myhash.update(str(buffersize));
        myhash.update(str(exec_time_start));
    else:
        myhash.update(httpurl.encode('utf-8'));
        myhash.update(str(buffersize).encode('utf-8'));
        myhash.update(str(exec_time_start).encode('utf-8'));
    newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest());
    if(sleep<0):
        sleep = geturls_download_sleep;
    urlparts = urlparse.urlparse(httpurl);
    if(isinstance(httpheaders, list)):
        httpheaders = make_http_headers_from_list_to_dict(httpheaders);
    httpheaders = fix_header_names(httpheaders);
    # BUGFIX: the User-Agent/Referer values were previously added to the
    # httpuseragent string (AttributeError) instead of the header dict.
    if(httpuseragent is not None):
        httpheaders.update({'User-Agent': httpuseragent});
    if(httpreferer is not None):
        httpheaders.update({'Referer': httpreferer});
    if(urlparts.username is not None or urlparts.password is not None):
        # Empty-string fallbacks guard against a URL with only one userinfo part.
        inurlencode = b64encode(str((urlparts.username or "")+":"+(urlparts.password or "")).encode()).decode("UTF-8");
        httpheaders.update({'Authorization': "Basic "+inurlencode});
    # (An unused urllib opener was previously built here; it never carried
    # the request, so it has been removed.)
    time.sleep(sleep);
    if(urlparts[0]=="http"):
        httpconn = HTTPConnection(urlparts[1]);
    elif(urlparts[0]=="https"):
        httpconn = HTTPSConnection(urlparts[1]);
    else:
        return False;
    if(postdata is not None and not isinstance(postdata, dict)):
        postdata = urlencode(postdata);
    try:
        if(httpmethod=="POST"):
            # BUGFIX: POST requests were previously issued with method "GET".
            httpconn.request("POST", urlparts[2], body=postdata, headers=httpheaders);
        else:
            httpconn.request("GET", urlparts[2], headers=httpheaders);
    except socket.timeout:
        log.info("Error With URL "+httpurl);
        return False;
    except socket.gaierror:
        log.info("Error With URL "+httpurl);
        return False;
    geturls_text = httpconn.getresponse();
    httpcodeout = geturls_text.status;
    httpversionout = "1.1";
    httpmethodout = httpmethod;
    httpurlout = httpurl;
    httpheaderout = geturls_text.getheaders();
    httpheadersentout = httpheaders;
    if(isinstance(httpheaderout, list)):
        httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
    if(sys.version[0]=="2"):
        # Python 2's header object is not a real dict; copy it into one.
        try:
            httpheaderout = {hkey: httpheaderout[hkey] for hkey in httpheaderout.keys()};
        except AttributeError:
            pass;
    httpheaderout = fix_header_names(httpheaderout);
    if(isinstance(httpheadersentout, list)):
        httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
    httpheadersentout = fix_header_names(httpheadersentout);
    downloadsize = httpheaderout.get('Content-Length');
    if(downloadsize is not None):
        downloadsize = int(downloadsize);
    if(downloadsize is None):
        downloadsize = 0;
    fulldatasize = 0;
    prevdownsize = 0;
    log.info("Downloading URL "+httpurl);
    with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
        tmpfilename = f.name;
        # Mirror the server's Last-Modified time onto the temp file.
        # BUGFIX: guarded against a missing header, which previously raised
        # an uncaught TypeError from parsedate_to_datetime(None).
        lastmodified = httpheaderout.get('Last-Modified');
        if(lastmodified is not None):
            try:
                lastmodtime = time.mktime(email.utils.parsedate_to_datetime(lastmodified).timetuple());
                os.utime(tmpfilename, (lastmodtime, lastmodtime));
            except (AttributeError, TypeError):
                try:
                    lastmodtime = time.mktime(datetime.datetime.strptime(lastmodified, "%a, %d %b %Y %H:%M:%S %Z").timetuple());
                    os.utime(tmpfilename, (lastmodtime, lastmodtime));
                except ValueError:
                    pass;
            except ValueError:
                pass;
        # BUGFIX: the result dict previously duplicated the 'Type' key and
        # referenced an undefined returnval_content, raising NameError.
        returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
        while True:
            databytes = geturls_text.read(buffersize);
            if(not databytes):
                break;
            datasize = len(databytes);
            fulldatasize = datasize + fulldatasize;
            percentage = "";
            if(downloadsize>0):
                percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
            downloaddiff = fulldatasize - prevdownsize;
            log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
            prevdownsize = fulldatasize;
            f.write(databytes);
    geturls_text.close();
    exec_time_end = time.time();
    # BUGFIX: elapsed time was previously computed as start - end (negative).
    log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to download file.");
    returnval.update({'Filesize': os.path.getsize(tmpfilename), 'DownloadTime': float(exec_time_end - exec_time_start), 'DownloadTimeReadable': hms_string(exec_time_end - exec_time_start)});
    return returnval;
def download_from_url_to_file_with_httplib(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
 """
 Download httpurl using the httplib backend.

 When outfile is a file name the downloaded data is moved to outpath/outfile
 and a dict describing the file is returned; when outfile is "-" the data is
 instead returned in-memory under the 'Content' key. Returns False on failure
 (bad paths or a failed download).
 """
 global geturls_download_sleep;
 if(sleep<0):
  sleep = geturls_download_sleep;
 if(not outfile=="-"):
  outpath = outpath.rstrip(os.path.sep);
  filepath = os.path.realpath(outpath+os.path.sep+outfile);
  if(not os.path.exists(outpath)):
   os.makedirs(outpath);
  if(os.path.exists(outpath) and os.path.isfile(outpath)):
   return False;
  if(os.path.exists(filepath) and os.path.isdir(filepath)):
   return False;
  pretmpfilename = download_from_url_file_with_httplib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
  if(not pretmpfilename):
   return False;
  tmpfilename = pretmpfilename['Filename'];
  downloadsize = os.path.getsize(tmpfilename);
  fulldatasize = 0;
  log.info("Moving file "+tmpfilename+" to "+filepath);
  exec_time_start = time.time();
  shutil.move(tmpfilename, filepath);
  # Preserve the server's Last-Modified timestamp on the moved file; fall
  # back to manual strptime parsing where parsedate_to_datetime is missing.
  try:
   os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())));
  except AttributeError:
   try:
    os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
   except ValueError:
    pass;
  except ValueError:
   pass;
  exec_time_end = time.time();
  # BUGFIX: durations were computed as start - end (always negative).
  log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to move file.");
  if(os.path.exists(tmpfilename)):
   os.remove(tmpfilename);
  # BUGFIX: the returnval dict listed 'Method' twice; the later httpmethod
  # value was the effective one, so only that key is kept.
  returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
 if(outfile=="-" and sys.version[0]=="2"):
  pretmpfilename = download_from_url_file_with_httplib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
  if(not pretmpfilename):
   return False;
  tmpfilename = pretmpfilename['Filename'];
  downloadsize = os.path.getsize(tmpfilename);
  fulldatasize = 0;
  prevdownsize = 0;
  exec_time_start = time.time();
  with open(tmpfilename, 'rb') as ft:
   f = StringIO();
   while True:
    databytes = ft.read(buffersize[1]);
    if not databytes: break;
    datasize = len(databytes);
    fulldatasize = datasize + fulldatasize;
    percentage = "";
    if(downloadsize>0):
     percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
    downloaddiff = fulldatasize - prevdownsize;
    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
    prevdownsize = fulldatasize;
    f.write(databytes);
   f.seek(0);
   fdata = f.getvalue();
   f.close();
   ft.close();
   os.remove(tmpfilename);
   exec_time_end = time.time();
   # BUGFIX: durations were computed as start - end (always negative).
   log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to copy file.");
  returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
 if(outfile=="-" and sys.version[0]>="3"):
  # BUGFIX: this branch previously called the urllib backend by mistake;
  # it now uses the same httplib downloader as the rest of the function,
  # and checks for a failed download like the Python 2 branch does.
  pretmpfilename = download_from_url_file_with_httplib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
  if(not pretmpfilename):
   return False;
  tmpfilename = pretmpfilename['Filename'];
  downloadsize = os.path.getsize(tmpfilename);
  fulldatasize = 0;
  prevdownsize = 0;
  exec_time_start = time.time();
  with open(tmpfilename, 'rb') as ft:
   f = BytesIO();
   while True:
    databytes = ft.read(buffersize[1]);
    if not databytes: break;
    datasize = len(databytes);
    fulldatasize = datasize + fulldatasize;
    percentage = "";
    if(downloadsize>0):
     percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
    downloaddiff = fulldatasize - prevdownsize;
    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
    prevdownsize = fulldatasize;
    f.write(databytes);
   f.seek(0);
   fdata = f.getvalue();
   f.close();
   ft.close();
   os.remove(tmpfilename);
   exec_time_end = time.time();
   # BUGFIX: durations were computed as start - end (always negative).
   log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to copy file.");
  returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
 return returnval;
if(havehttplib2):
 def download_from_url_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
  """
  Fetch httpurl with httplib2's timeout-aware connection classes and return
  a dict holding the decoded response body plus metadata (headers, code,
  URL, method), or False on connection failure / unsupported scheme.
  """
  global geturls_download_sleep, havebrotli;
  if(sleep<0):
   sleep = geturls_download_sleep;
  urlparts = urlparse.urlparse(httpurl);
  if(isinstance(httpheaders, list)):
   httpheaders = make_http_headers_from_list_to_dict(httpheaders);
  httpheaders = fix_header_names(httpheaders);
  if(httpuseragent is not None):
   if('User-Agent' in httpheaders):
    httpheaders['User-Agent'] = httpuseragent;
   else:
    # BUGFIX: previously called httpuseragent.update(...), mutating the
    # wrong object so the header was never actually added.
    httpheaders.update({'User-Agent': httpuseragent});
  if(httpreferer is not None):
   if('Referer' in httpheaders):
    httpheaders['Referer'] = httpreferer;
   else:
    # BUGFIX: same wrong-object update as above for the Referer header.
    httpheaders.update({'Referer': httpreferer});
  if(urlparts.username is not None or urlparts.password is not None):
   # Credentials embedded in the URL become an HTTP Basic Authorization header.
   inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
   httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
  geturls_opener = build_opener(HTTPCookieProcessor(httpcookie));
  geturls_opener.addheaders = httpheaders;
  time.sleep(sleep);
  if(urlparts[0]=="http"):
   httpconn = HTTPConnectionWithTimeout(urlparts[1]);
  elif(urlparts[0]=="https"):
   httpconn = HTTPSConnectionWithTimeout(urlparts[1]);
  else:
   return False;
  if(postdata is not None and not isinstance(postdata, dict)):
   postdata = urlencode(postdata);
  try:
   if(httpmethod=="GET"):
    httpconn.request("GET", urlparts[2], headers=httpheaders);
   elif(httpmethod=="POST"):
    # BUGFIX: POST requests were previously issued with the "GET" verb.
    httpconn.request("POST", urlparts[2], body=postdata, headers=httpheaders);
   else:
    httpconn.request("GET", urlparts[2], headers=httpheaders);
  except socket.timeout:
   log.info("Error With URL "+httpurl);
   return False;
  except socket.gaierror:
   log.info("Error With URL "+httpurl);
   return False;
  geturls_text = httpconn.getresponse();
  httpcodeout = geturls_text.status;
  httpversionout = "1.1";
  httpmethodout = httpmethod;
  httpurlout = httpurl;
  httpheaderout = geturls_text.getheaders();
  httpheadersentout = httpheaders;
  if(isinstance(httpheaderout, list)):
   httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
  if(sys.version[0]=="2"):
   # Python 2's header object is not a plain dict; rebuild it key by key.
   try:
    prehttpheaderout = httpheaderout;
    httpheaderkeys = httpheaderout.keys();
    imax = len(httpheaderkeys);
    ic = 0;
    httpheaderout = {};
    while(ic < imax):
     httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
     ic += 1;
   except AttributeError:
    pass;
  httpheaderout = fix_header_names(httpheaderout);
  if(isinstance(httpheadersentout, list)):
   httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
  httpheadersentout = fix_header_names(httpheadersentout);
  log.info("Downloading URL "+httpurl);
  # Transparently decompress gzip/deflate (and brotli, when available).
  if(httpheaderout.get("Content-Encoding")=="gzip" or httpheaderout.get("Content-Encoding")=="deflate"):
   if(sys.version[0]=="2"):
    strbuf = StringIO(geturls_text.read());
   if(sys.version[0]>="3"):
    strbuf = BytesIO(geturls_text.read());
   gzstrbuf = gzip.GzipFile(fileobj=strbuf);
   returnval_content = gzstrbuf.read()[:];
  if(httpheaderout.get("Content-Encoding")!="gzip" and httpheaderout.get("Content-Encoding")!="deflate" and httpheaderout.get("Content-Encoding")!="br"):
   returnval_content = geturls_text.read()[:];
  if(httpheaderout.get("Content-Encoding")=="br" and havebrotli):
   returnval_content = geturls_text.read()[:];
   returnval_content = brotli.decompress(returnval_content);
  returnval = {'Type': "Content", 'Content': returnval_content, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
  geturls_text.close();
  return returnval;
if(not havehttplib2):
 def download_from_url_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
  """Fallback when httplib2 is not installed: delegate to the urllib backend."""
  return download_from_url_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
if(havehttplib2):
 def download_from_url_file_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
  """
  Download httpurl with httplib2 into a uniquely-named temporary file and
  return a dict describing it (filename, sizes, headers, timing), or False
  on failure. The caller is responsible for removing the temporary file.
  """
  global geturls_download_sleep, tmpfileprefix, tmpfilesuffix;
  exec_time_start = time.time();
  # Derive a unique temp-file suffix from the URL, buffer size and start time.
  myhash = hashlib.new("sha1");
  if(sys.version[0]=="2"):
   myhash.update(httpurl);
   myhash.update(str(buffersize));
   myhash.update(str(exec_time_start));
  if(sys.version[0]>="3"):
   myhash.update(httpurl.encode('utf-8'));
   myhash.update(str(buffersize).encode('utf-8'));
   myhash.update(str(exec_time_start).encode('utf-8'));
  newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest());
  if(sleep<0):
   sleep = geturls_download_sleep;
  urlparts = urlparse.urlparse(httpurl);
  if(isinstance(httpheaders, list)):
   httpheaders = make_http_headers_from_list_to_dict(httpheaders);
  httpheaders = fix_header_names(httpheaders);
  if(httpuseragent is not None):
   if('User-Agent' in httpheaders):
    httpheaders['User-Agent'] = httpuseragent;
   else:
    # BUGFIX: previously called httpuseragent.update(...), mutating the
    # wrong object so the header was never actually added.
    httpheaders.update({'User-Agent': httpuseragent});
  if(httpreferer is not None):
   if('Referer' in httpheaders):
    httpheaders['Referer'] = httpreferer;
   else:
    # BUGFIX: same wrong-object update as above for the Referer header.
    httpheaders.update({'Referer': httpreferer});
  if(urlparts.username is not None or urlparts.password is not None):
   # Credentials embedded in the URL become an HTTP Basic Authorization header.
   inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
   httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
  geturls_opener = build_opener(HTTPCookieProcessor(httpcookie));
  geturls_opener.addheaders = httpheaders;
  time.sleep(sleep);
  if(urlparts[0]=="http"):
   httpconn = HTTPConnectionWithTimeout(urlparts[1]);
  elif(urlparts[0]=="https"):
   httpconn = HTTPSConnectionWithTimeout(urlparts[1]);
  else:
   return False;
  if(postdata is not None and not isinstance(postdata, dict)):
   postdata = urlencode(postdata);
  try:
   if(httpmethod=="GET"):
    httpconn.request("GET", urlparts[2], headers=httpheaders);
   elif(httpmethod=="POST"):
    # BUGFIX: POST requests were previously issued with the "GET" verb.
    httpconn.request("POST", urlparts[2], body=postdata, headers=httpheaders);
   else:
    httpconn.request("GET", urlparts[2], headers=httpheaders);
  except socket.timeout:
   log.info("Error With URL "+httpurl);
   return False;
  except socket.gaierror:
   log.info("Error With URL "+httpurl);
   return False;
  geturls_text = httpconn.getresponse();
  httpcodeout = geturls_text.status;
  httpversionout = "1.1";
  httpmethodout = httpmethod;
  httpurlout = httpurl;
  httpheaderout = geturls_text.getheaders();
  httpheadersentout = httpheaders;
  if(isinstance(httpheaderout, list)):
   httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
  if(sys.version[0]=="2"):
   # Python 2's header object is not a plain dict; rebuild it key by key.
   try:
    prehttpheaderout = httpheaderout;
    httpheaderkeys = httpheaderout.keys();
    imax = len(httpheaderkeys);
    ic = 0;
    httpheaderout = {};
    while(ic < imax):
     httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
     ic += 1;
   except AttributeError:
    pass;
  httpheaderout = fix_header_names(httpheaderout);
  if(isinstance(httpheadersentout, list)):
   httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
  httpheadersentout = fix_header_names(httpheadersentout);
  downloadsize = httpheaderout.get('Content-Length');
  if(downloadsize is not None):
   downloadsize = int(downloadsize);
  if downloadsize is None: downloadsize = 0;
  fulldatasize = 0;
  prevdownsize = 0;
  log.info("Downloading URL "+httpurl);
  with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
   tmpfilename = f.name;
   # Stamp the temp file with the server's Last-Modified time when available;
   # fall back to manual strptime parsing where parsedate_to_datetime is missing.
   try:
    os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple())));
   except AttributeError:
    try:
     os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
    except ValueError:
     pass;
   except ValueError:
    pass;
   returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
   while True:
    databytes = geturls_text.read(buffersize);
    if not databytes: break;
    datasize = len(databytes);
    fulldatasize = datasize + fulldatasize;
    percentage = "";
    if(downloadsize>0):
     percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
    downloaddiff = fulldatasize - prevdownsize;
    log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
    prevdownsize = fulldatasize;
    f.write(databytes);
   f.close();
  geturls_text.close();
  exec_time_end = time.time();
  # BUGFIX: durations were computed as start - end (always negative).
  log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to download file.");
  returnval.update({'Filesize': os.path.getsize(tmpfilename), 'DownloadTime': float(exec_time_end - exec_time_start), 'DownloadTimeReadable': hms_string(exec_time_end - exec_time_start)});
  return returnval;
if(not havehttplib2):
 def download_from_url_file_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
  """Fallback when httplib2 is not installed: delegate to the urllib backend."""
  return download_from_url_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep);
if(havehttplib2):
 def download_from_url_to_file_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
  """
  Download httpurl using the httplib2 backend.

  When outfile is a file name the downloaded data is moved to
  outpath/outfile and a dict describing the file is returned; when outfile
  is "-" the data is instead returned in-memory under the 'Content' key.
  Returns False on failure (bad paths or a failed download).
  """
  global geturls_download_sleep;
  if(sleep<0):
   sleep = geturls_download_sleep;
  if(not outfile=="-"):
   outpath = outpath.rstrip(os.path.sep);
   filepath = os.path.realpath(outpath+os.path.sep+outfile);
   if(not os.path.exists(outpath)):
    os.makedirs(outpath);
   if(os.path.exists(outpath) and os.path.isfile(outpath)):
    return False;
   if(os.path.exists(filepath) and os.path.isdir(filepath)):
    return False;
   pretmpfilename = download_from_url_file_with_httplib2(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
   if(not pretmpfilename):
    return False;
   tmpfilename = pretmpfilename['Filename'];
   downloadsize = os.path.getsize(tmpfilename);
   fulldatasize = 0;
   log.info("Moving file "+tmpfilename+" to "+filepath);
   exec_time_start = time.time();
   shutil.move(tmpfilename, filepath);
   # Preserve the server's Last-Modified timestamp on the moved file; fall
   # back to manual strptime parsing where parsedate_to_datetime is missing.
   try:
    os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())));
   except AttributeError:
    try:
     os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
    except ValueError:
     pass;
   except ValueError:
    pass;
   exec_time_end = time.time();
   # BUGFIX: durations were computed as start - end (always negative).
   log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to move file.");
   if(os.path.exists(tmpfilename)):
    os.remove(tmpfilename);
   # BUGFIX: the returnval dict listed 'Method' twice; the later httpmethod
   # value was the effective one, so only that key is kept.
   returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
  if(outfile=="-" and sys.version[0]=="2"):
   pretmpfilename = download_from_url_file_with_httplib2(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
   if(not pretmpfilename):
    return False;
   tmpfilename = pretmpfilename['Filename'];
   downloadsize = os.path.getsize(tmpfilename);
   fulldatasize = 0;
   prevdownsize = 0;
   exec_time_start = time.time();
   with open(tmpfilename, 'rb') as ft:
    f = StringIO();
    while True:
     databytes = ft.read(buffersize[1]);
     if not databytes: break;
     datasize = len(databytes);
     fulldatasize = datasize + fulldatasize;
     percentage = "";
     if(downloadsize>0):
      percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
     downloaddiff = fulldatasize - prevdownsize;
     log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
     prevdownsize = fulldatasize;
     f.write(databytes);
    f.seek(0);
    fdata = f.getvalue();
    f.close();
    ft.close();
    os.remove(tmpfilename);
    exec_time_end = time.time();
    # BUGFIX: durations were computed as start - end (always negative).
    log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to copy file.");
   returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
  if(outfile=="-" and sys.version[0]>="3"):
   # BUGFIX: this branch previously called the urllib backend by mistake;
   # it now uses the same httplib2 downloader as the rest of the function,
   # and checks for a failed download like the Python 2 branch does.
   pretmpfilename = download_from_url_file_with_httplib2(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
   if(not pretmpfilename):
    return False;
   tmpfilename = pretmpfilename['Filename'];
   downloadsize = os.path.getsize(tmpfilename);
   fulldatasize = 0;
   prevdownsize = 0;
   exec_time_start = time.time();
   with open(tmpfilename, 'rb') as ft:
    f = BytesIO();
    while True:
     databytes = ft.read(buffersize[1]);
     if not databytes: break;
     datasize = len(databytes);
     fulldatasize = datasize + fulldatasize;
     percentage = "";
     if(downloadsize>0):
      percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
     downloaddiff = fulldatasize - prevdownsize;
     log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
     prevdownsize = fulldatasize;
     f.write(databytes);
    f.seek(0);
    fdata = f.getvalue();
    f.close();
    ft.close();
    os.remove(tmpfilename);
    exec_time_end = time.time();
    # BUGFIX: durations were computed as start - end (always negative).
    log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to copy file.");
   returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
  return returnval;
if(not havehttplib2):
 def download_from_url_to_file_with_httplib2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
  """Fallback when httplib2 is not installed: delegate to the urllib backend."""
  return download_from_url_to_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep);
def download_from_url_with_request(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
 """
 Fetch httpurl via urllib's Request/urlopen machinery (with a cookie-aware
 opener installed) and return a dict holding the decoded response body plus
 metadata (headers, code, URL, method), or False on failure.
 """
 global geturls_download_sleep, havebrotli;
 if(sleep<0):
  sleep = geturls_download_sleep;
 urlparts = urlparse.urlparse(httpurl);
 if(isinstance(httpheaders, list)):
  httpheaders = make_http_headers_from_list_to_dict(httpheaders);
 httpheaders = fix_header_names(httpheaders);
 if(httpuseragent is not None):
  if('User-Agent' in httpheaders):
   httpheaders['User-Agent'] = httpuseragent;
  else:
   # BUGFIX: previously called httpuseragent.update(...), mutating the
   # wrong object so the header was never actually added.
   httpheaders.update({'User-Agent': httpuseragent});
 if(httpreferer is not None):
  if('Referer' in httpheaders):
   httpheaders['Referer'] = httpreferer;
  else:
   # BUGFIX: same wrong-object update as above for the Referer header.
   httpheaders.update({'Referer': httpreferer});
 if(urlparts.username is not None or urlparts.password is not None):
  # Credentials embedded in the URL become an HTTP Basic Authorization header.
  inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
  httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
 geturls_opener = build_opener(HTTPCookieProcessor(httpcookie));
 if(isinstance(httpheaders, dict)):
  httpheaders = make_http_headers_from_dict_to_list(httpheaders);
 geturls_opener.addheaders = httpheaders;
 install_opener(geturls_opener);
 time.sleep(sleep);
 httpheaders = make_http_headers_from_list_to_dict(httpheaders);
 if(postdata is not None and not isinstance(postdata, dict)):
  postdata = urlencode(postdata);
 try:
  if(httpmethod=="GET"):
   geturls_request = Request(httpurl, headers=httpheaders);
   geturls_text = urlopen(geturls_request);
  elif(httpmethod=="POST"):
   geturls_request = Request(httpurl, headers=httpheaders);
   geturls_text = urlopen(geturls_request, data=postdata);
  else:
   geturls_request = Request(httpurl, headers=httpheaders);
   geturls_text = urlopen(geturls_request);
 except HTTPError as geturls_text_error:
  # An HTTP error response still carries a readable body; keep it.
  geturls_text = geturls_text_error;
  log.info("Error With URL "+httpurl);
 except URLError:
  log.info("Error With URL "+httpurl);
  return False;
 except socket.timeout:
  log.info("Error With URL "+httpurl);
  return False;
 httpcodeout = geturls_text.getcode();
 httpversionout = "1.1";
 httpmethodout = httpmethod;
 httpurlout = geturls_text.geturl();
 httpheaderout = geturls_text.headers;
 httpheadersentout = httpheaders;
 if(isinstance(httpheaderout, list)):
  httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
 if(sys.version[0]=="2"):
  # Python 2's header object is not a plain dict; rebuild it key by key.
  try:
   prehttpheaderout = httpheaderout;
   httpheaderkeys = httpheaderout.keys();
   imax = len(httpheaderkeys);
   ic = 0;
   httpheaderout = {};
   while(ic < imax):
    httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
    ic += 1;
  except AttributeError:
   pass;
 httpheaderout = fix_header_names(httpheaderout);
 if(isinstance(httpheadersentout, list)):
  httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
 httpheadersentout = fix_header_names(httpheadersentout);
 log.info("Downloading URL "+httpurl);
 # Transparently decompress gzip/deflate (and brotli, when available).
 if(httpheaderout.get("Content-Encoding")=="gzip" or httpheaderout.get("Content-Encoding")=="deflate"):
  if(sys.version[0]=="2"):
   strbuf = StringIO(geturls_text.read());
  if(sys.version[0]>="3"):
   strbuf = BytesIO(geturls_text.read());
  gzstrbuf = gzip.GzipFile(fileobj=strbuf);
  returnval_content = gzstrbuf.read()[:];
 if(httpheaderout.get("Content-Encoding")!="gzip" and httpheaderout.get("Content-Encoding")!="deflate" and httpheaderout.get("Content-Encoding")!="br"):
  returnval_content = geturls_text.read()[:];
 if(httpheaderout.get("Content-Encoding")=="br" and havebrotli):
  returnval_content = geturls_text.read()[:];
  returnval_content = brotli.decompress(returnval_content);
 returnval = {'Type': "Content", 'Content': returnval_content, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
 geturls_text.close();
 return returnval;
def download_from_url_file_with_request(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
    """Download httpurl via urllib into a uniquely named temporary file.

    Returns a result dict ('Type': "File", 'Filename', 'Filesize',
    'Headers', 'Version', 'Method', 'HeadersSent', 'URL', 'Code',
    'DownloadTime', ...) on success, or False on connection failure.
    The caller is responsible for removing the temporary file.
    """
    global geturls_download_sleep, tmpfileprefix, tmpfilesuffix;
    exec_time_start = time.time();
    # Unique temp-file suffix derived from URL, buffer size and start time.
    myhash = hashlib.new("sha1");
    if(sys.version[0]=="2"):
        myhash.update(httpurl);
        myhash.update(str(buffersize));
        myhash.update(str(exec_time_start));
    if(sys.version[0]>="3"):
        myhash.update(httpurl.encode('utf-8'));
        myhash.update(str(buffersize).encode('utf-8'));
        myhash.update(str(exec_time_start).encode('utf-8'));
    newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest());
    if(sleep<0):
        sleep = geturls_download_sleep;
    urlparts = urlparse.urlparse(httpurl);
    if(isinstance(httpheaders, list)):
        httpheaders = make_http_headers_from_list_to_dict(httpheaders);
    httpheaders = fix_header_names(httpheaders);
    if(httpuseragent is not None):
        # BUGFIX: the old else-branch called .update() on the httpuseragent
        # string itself, so the header was never set; update httpheaders.
        httpheaders.update({'User-Agent': httpuseragent});
    if(httpreferer is not None):
        # BUGFIX: same as above for the Referer header.
        httpheaders.update({'Referer': httpreferer});
    if(urlparts.username is not None or urlparts.password is not None):
        # Translate userinfo embedded in the URL into HTTP Basic Auth.
        inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
        httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
    geturls_opener = build_opener(HTTPCookieProcessor(httpcookie));
    if(isinstance(httpheaders, dict)):
        httpheaders = make_http_headers_from_dict_to_list(httpheaders);
    geturls_opener.addheaders = httpheaders;
    install_opener(geturls_opener);
    time.sleep(sleep);
    httpheaders = make_http_headers_from_list_to_dict(httpheaders);
    if(postdata is not None and not isinstance(postdata, dict)):
        postdata = urlencode(postdata);
    try:
        if(httpmethod=="POST"):
            geturls_request = Request(httpurl, headers=httpheaders);
            geturls_text = urlopen(geturls_request, data=postdata);
        else:
            # Any method other than POST is performed as a plain GET,
            # matching the original GET/else duplication.
            geturls_request = Request(httpurl, headers=httpheaders);
            geturls_text = urlopen(geturls_request);
    except HTTPError as geturls_text_error:
        # HTTP error responses still carry readable bodies; keep reading.
        geturls_text = geturls_text_error;
        log.info("Error With URL "+httpurl);
    except URLError:
        log.info("Error With URL "+httpurl);
        return False;
    except socket.timeout:
        log.info("Error With URL "+httpurl);
        return False;
    httpcodeout = geturls_text.getcode();
    httpversionout = "1.1";
    httpmethodout = httpmethod;
    httpurlout = geturls_text.geturl();
    httpheaderout = geturls_text.headers;
    httpheadersentout = httpheaders;
    if(isinstance(httpheaderout, list)):
        httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
    if(sys.version[0]=="2"):
        # Python 2's message object is not a plain dict; copy it key by key.
        try:
            prehttpheaderout = httpheaderout;
            httpheaderkeys = httpheaderout.keys();
            imax = len(httpheaderkeys);
            ic = 0;
            httpheaderout = {};
            while(ic < imax):
                httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
                ic += 1;
        except AttributeError:
            pass;
    httpheaderout = fix_header_names(httpheaderout);
    if(isinstance(httpheadersentout, list)):
        httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
    httpheadersentout = fix_header_names(httpheadersentout);
    downloadsize = httpheaderout.get('Content-Length');
    if(downloadsize is not None):
        downloadsize = int(downloadsize);
    if downloadsize is None: downloadsize = 0;
    fulldatasize = 0;
    prevdownsize = 0;
    log.info("Downloading URL "+httpurl);
    with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
        tmpfilename = f.name;
        # Mirror the server's Last-Modified timestamp onto the temp file.
        try:
            os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple())));
        except AttributeError:
            try:
                os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
            except ValueError:
                pass;
        except ValueError:
            pass;
        returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
        while True:
            databytes = geturls_text.read(buffersize);
            if not databytes: break;
            datasize = len(databytes);
            fulldatasize = datasize + fulldatasize;
            percentage = "";
            if(downloadsize>0):
                percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
            downloaddiff = fulldatasize - prevdownsize;
            log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
            prevdownsize = fulldatasize;
            f.write(databytes);
        f.close();
    geturls_text.close();
    exec_time_end = time.time();
    # BUGFIX: duration is end - start (was start - end, always negative).
    log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to download file.");
    returnval.update({'Filesize': os.path.getsize(tmpfilename), 'DownloadTime': float(exec_time_end - exec_time_start), 'DownloadTimeReadable': hms_string(exec_time_end - exec_time_start)});
    return returnval;
def download_from_url_to_file_with_request(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
    """Download httpurl with urllib either to outpath/outfile (returns a
    'File' result dict) or, when outfile is "-", into memory (returns a
    'Content' result dict).  Returns False on failure.
    """
    global geturls_download_sleep;
    if(sleep<0):
        sleep = geturls_download_sleep;
    if(not outfile=="-"):
        # Download to a temporary file, then move it into place.
        outpath = outpath.rstrip(os.path.sep);
        filepath = os.path.realpath(outpath+os.path.sep+outfile);
        if(not os.path.exists(outpath)):
            os.makedirs(outpath);
        if(os.path.exists(outpath) and os.path.isfile(outpath)):
            return False;
        if(os.path.exists(filepath) and os.path.isdir(filepath)):
            return False;
        pretmpfilename = download_from_url_file_with_request(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
        if(not pretmpfilename):
            return False;
        tmpfilename = pretmpfilename['Filename'];
        downloadsize = os.path.getsize(tmpfilename);
        fulldatasize = 0;
        log.info("Moving file "+tmpfilename+" to "+filepath);
        exec_time_start = time.time();
        shutil.move(tmpfilename, filepath);
        # Preserve the server's Last-Modified timestamp on the final file.
        try:
            os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())));
        except AttributeError:
            try:
                os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
            except ValueError:
                pass;
        except ValueError:
            pass;
        exec_time_end = time.time();
        # BUGFIX: duration is end - start (was start - end, always negative).
        log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to move file.");
        if(os.path.exists(tmpfilename)):
            os.remove(tmpfilename);
        # BUGFIX: the dict previously listed 'Method' twice; the literal
        # duplicate has been removed (last-write-wins kept httpmethod).
        returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
    if(outfile=="-"):
        # The two Python-version branches were identical except for the
        # in-memory buffer type; they are merged here, and the missing
        # failure check in the Python 3 path is now applied to both.
        pretmpfilename = download_from_url_file_with_request(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
        if(not pretmpfilename):
            return False;
        tmpfilename = pretmpfilename['Filename'];
        downloadsize = os.path.getsize(tmpfilename);
        fulldatasize = 0;
        prevdownsize = 0;
        exec_time_start = time.time();
        with open(tmpfilename, 'rb') as ft:
            if(sys.version[0]=="2"):
                f = StringIO();
            else:
                f = BytesIO();
            while True:
                databytes = ft.read(buffersize[1]);
                if not databytes: break;
                datasize = len(databytes);
                fulldatasize = datasize + fulldatasize;
                percentage = "";
                if(downloadsize>0):
                    percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                downloaddiff = fulldatasize - prevdownsize;
                log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                prevdownsize = fulldatasize;
                f.write(databytes);
            f.seek(0);
            fdata = f.getvalue();
            f.close();
            ft.close();
        os.remove(tmpfilename);
        exec_time_end = time.time();
        # BUGFIX: duration is end - start (was start - end, always negative).
        log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to copy file.");
        # BUGFIX: duplicate 'Method' key removed here as well.
        returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
    return returnval;
if(haverequests):
    def download_from_url_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        """Fetch httpurl with the requests library and return a 'Content'
        result dict, or False on connection failure."""
        global geturls_download_sleep, havebrotli;
        if(sleep<0):
            sleep = geturls_download_sleep;
        urlparts = urlparse.urlparse(httpurl);
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders);
        httpheaders = fix_header_names(httpheaders);
        if(httpuseragent is not None):
            # BUGFIX: was calling .update() on the httpuseragent string
            # itself instead of on httpheaders.
            httpheaders.update({'User-Agent': httpuseragent});
        if(httpreferer is not None):
            # BUGFIX: same as above for the Referer header.
            httpheaders.update({'Referer': httpreferer});
        if(urlparts.username is not None or urlparts.password is not None):
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
            httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
        time.sleep(sleep);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            if(httpmethod=="POST"):
                geturls_text = requests.post(httpurl, data=postdata, headers=httpheaders, cookies=httpcookie, stream=True);
            else:
                # Any non-POST method is performed as a plain GET.
                geturls_text = requests.get(httpurl, headers=httpheaders, cookies=httpcookie, stream=True);
        except requests.exceptions.ConnectTimeout:
            log.info("Error With URL "+httpurl);
            return False;
        except requests.exceptions.ConnectionError:
            # BUGFIX: requests has no ConnectError; the correct exception
            # name is ConnectionError.
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.status_code;
        httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = geturls_text.url;
        httpheaderout = geturls_text.headers;
        httpheadersentout = httpheaders;
        if(isinstance(httpheaderout, list)):
            httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
        if(sys.version[0]=="2"):
            # Python 2's header object is not a plain dict; copy it.
            try:
                prehttpheaderout = httpheaderout;
                httpheaderkeys = httpheaderout.keys();
                imax = len(httpheaderkeys);
                ic = 0;
                httpheaderout = {};
                while(ic < imax):
                    httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
                    ic += 1;
            except AttributeError:
                pass;
        httpheaderout = fix_header_names(httpheaderout);
        if(isinstance(httpheadersentout, list)):
            httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
        httpheadersentout = fix_header_names(httpheadersentout);
        log.info("Downloading URL "+httpurl);
        httpcontentencoding = httpheaderout.get("Content-Encoding");
        if(httpcontentencoding=="gzip" or httpcontentencoding=="deflate"):
            # Transparently decompress gzip/deflate bodies.
            if(sys.version[0]=="2"):
                strbuf = StringIO(geturls_text.raw.read());
            if(sys.version[0]>="3"):
                strbuf = BytesIO(geturls_text.raw.read());
            gzstrbuf = gzip.GzipFile(fileobj=strbuf);
            returnval_content = gzstrbuf.read()[:];
        elif(httpcontentencoding=="br" and havebrotli):
            returnval_content = geturls_text.raw.read()[:];
            returnval_content = brotli.decompress(returnval_content);
        else:
            # Also reached when the body is brotli-encoded but the brotli
            # module is missing; previously that path left returnval_content
            # unbound and crashed with NameError.
            returnval_content = geturls_text.raw.read()[:];
        returnval = {'Type': "Content", 'Content': returnval_content, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
        geturls_text.close();
        return returnval;
if(not haverequests):
    def download_from_url_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        # requests is not installed: hand the call straight through to the
        # urllib-based implementation, which shares the same signature.
        return download_from_url_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
if(haverequests):
    def download_from_url_file_with_requests(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        """Download httpurl with the requests library into a uniquely named
        temporary file and return a 'File' result dict, or False on
        connection failure.  The caller removes the temporary file."""
        global geturls_download_sleep, tmpfileprefix, tmpfilesuffix;
        exec_time_start = time.time();
        # Unique temp-file suffix derived from URL, buffer size and start time.
        myhash = hashlib.new("sha1");
        if(sys.version[0]=="2"):
            myhash.update(httpurl);
            myhash.update(str(buffersize));
            myhash.update(str(exec_time_start));
        if(sys.version[0]>="3"):
            myhash.update(httpurl.encode('utf-8'));
            myhash.update(str(buffersize).encode('utf-8'));
            myhash.update(str(exec_time_start).encode('utf-8'));
        newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest());
        if(sleep<0):
            sleep = geturls_download_sleep;
        urlparts = urlparse.urlparse(httpurl);
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders);
        httpheaders = fix_header_names(httpheaders);
        if(httpuseragent is not None):
            # BUGFIX: was calling .update() on the httpuseragent string
            # itself instead of on httpheaders.
            httpheaders.update({'User-Agent': httpuseragent});
        if(httpreferer is not None):
            # BUGFIX: same as above for the Referer header.
            httpheaders.update({'Referer': httpreferer});
        if(urlparts.username is not None or urlparts.password is not None):
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
            httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
        time.sleep(sleep);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            if(httpmethod=="POST"):
                geturls_text = requests.post(httpurl, data=postdata, headers=httpheaders, cookies=httpcookie, stream=True);
            else:
                geturls_text = requests.get(httpurl, headers=httpheaders, cookies=httpcookie, stream=True);
        except requests.exceptions.ConnectTimeout:
            log.info("Error With URL "+httpurl);
            return False;
        except requests.exceptions.ConnectionError:
            # BUGFIX: requests has no ConnectError; correct name is
            # ConnectionError.
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.status_code;
        httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = geturls_text.url;
        httpheaderout = geturls_text.headers;
        httpheadersentout = httpheaders;
        if(isinstance(httpheaderout, list)):
            httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
        if(sys.version[0]=="2"):
            try:
                prehttpheaderout = httpheaderout;
                httpheaderkeys = httpheaderout.keys();
                imax = len(httpheaderkeys);
                ic = 0;
                httpheaderout = {};
                while(ic < imax):
                    httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
                    ic += 1;
            except AttributeError:
                pass;
        httpheaderout = fix_header_names(httpheaderout);
        if(isinstance(httpheadersentout, list)):
            httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
        httpheadersentout = fix_header_names(httpheadersentout);
        # BUGFIX: was int(httpheaderout.get('Content-Length')) which raises
        # TypeError when the header is absent; handle None like the urllib
        # variant does.
        downloadsize = httpheaderout.get('Content-Length');
        if(downloadsize is not None):
            downloadsize = int(downloadsize);
        if downloadsize is None: downloadsize = 0;
        fulldatasize = 0;
        prevdownsize = 0;
        log.info("Downloading URL "+httpurl);
        with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
            tmpfilename = f.name;
            # Mirror the server's Last-Modified timestamp onto the temp file.
            try:
                os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple())));
            except AttributeError:
                try:
                    os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
                except ValueError:
                    pass;
            except ValueError:
                pass;
            returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
            for databytes in geturls_text.iter_content(chunk_size=buffersize):
                datasize = len(databytes);
                fulldatasize = datasize + fulldatasize;
                percentage = "";
                if(downloadsize>0):
                    percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                downloaddiff = fulldatasize - prevdownsize;
                log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                prevdownsize = fulldatasize;
                f.write(databytes);
            f.close();
        geturls_text.close();
        exec_time_end = time.time();
        # BUGFIX: duration is end - start (was start - end, always negative).
        log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to download file.");
        returnval.update({'Filesize': os.path.getsize(tmpfilename), 'DownloadTime': float(exec_time_end - exec_time_start), 'DownloadTimeReadable': hms_string(exec_time_end - exec_time_start)});
        return returnval;
if(not haverequests):
    def download_from_url_file_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        # requests is not installed: forward everything to the urllib-based
        # file downloader, which takes the same arguments.
        return download_from_url_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep);
if(haverequests):
    def download_from_url_to_file_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        """Download httpurl with the requests backend either to
        outpath/outfile ('File' result dict) or, when outfile is "-", into
        memory ('Content' result dict).  Returns False on failure."""
        global geturls_download_sleep;
        if(sleep<0):
            sleep = geturls_download_sleep;
        if(not outfile=="-"):
            # Download to a temporary file, then move it into place.
            outpath = outpath.rstrip(os.path.sep);
            filepath = os.path.realpath(outpath+os.path.sep+outfile);
            if(not os.path.exists(outpath)):
                os.makedirs(outpath);
            if(os.path.exists(outpath) and os.path.isfile(outpath)):
                return False;
            if(os.path.exists(filepath) and os.path.isdir(filepath)):
                return False;
            pretmpfilename = download_from_url_file_with_requests(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            log.info("Moving file "+tmpfilename+" to "+filepath);
            exec_time_start = time.time();
            shutil.move(tmpfilename, filepath);
            # Preserve the server's Last-Modified timestamp on the final file.
            try:
                os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())));
            except AttributeError:
                try:
                    os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
                except ValueError:
                    pass;
            except ValueError:
                pass;
            exec_time_end = time.time();
            # BUGFIX: duration is end - start (was start - end, negative).
            log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to move file.");
            if(os.path.exists(tmpfilename)):
                os.remove(tmpfilename);
            # BUGFIX: duplicate 'Method' key removed (last-write-wins kept
            # httpmethod, which is preserved here).
            returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        if(outfile=="-"):
            # The two Python-version branches differed only in the buffer
            # type, so they are merged; this also fixes the Python 2 branch
            # bug where 'HeadersSent' was set to the literal list
            # ['HeadersSent'] instead of pretmpfilename['HeadersSent'], and
            # adds the failure check missing from the Python 3 branch.
            pretmpfilename = download_from_url_file_with_requests(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            prevdownsize = 0;
            exec_time_start = time.time();
            with open(tmpfilename, 'rb') as ft:
                if(sys.version[0]=="2"):
                    f = StringIO();
                else:
                    f = BytesIO();
                while True:
                    databytes = ft.read(buffersize[1]);
                    if not databytes: break;
                    datasize = len(databytes);
                    fulldatasize = datasize + fulldatasize;
                    percentage = "";
                    if(downloadsize>0):
                        percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                    downloaddiff = fulldatasize - prevdownsize;
                    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                    prevdownsize = fulldatasize;
                    f.write(databytes);
                f.seek(0);
                fdata = f.getvalue();
                f.close();
                ft.close();
            os.remove(tmpfilename);
            exec_time_end = time.time();
            # BUGFIX: duration is end - start (was start - end, negative).
            log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to copy file.");
            returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        return returnval;
if(not haverequests):
    def download_from_url_to_file_with_requests(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        # requests is not installed: delegate to the urllib implementation,
        # keeping the original argument order it expects.
        return download_from_url_to_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep);
if(havehttpx):
    def download_from_url_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        """Fetch httpurl with the httpx library and return a 'Content'
        result dict, or False on connection failure."""
        global geturls_download_sleep, havebrotli;
        if(sleep<0):
            sleep = geturls_download_sleep;
        urlparts = urlparse.urlparse(httpurl);
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders);
        httpheaders = fix_header_names(httpheaders);
        if(httpuseragent is not None):
            # BUGFIX: was calling .update() on the httpuseragent string
            # itself instead of on httpheaders.
            httpheaders.update({'User-Agent': httpuseragent});
        if(httpreferer is not None):
            # BUGFIX: same as above for the Referer header.
            httpheaders.update({'Referer': httpreferer});
        if(urlparts.username is not None or urlparts.password is not None):
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
            httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
        time.sleep(sleep);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        # Create one client for whichever request we make; previously a new
        # client was built in each branch and never closed, leaking the
        # connection pool.
        httpx_pool = httpx.Client(http1=True, http2=False, trust_env=True);
        try:
            if(httpmethod=="POST"):
                geturls_text = httpx_pool.post(httpurl, data=postdata, headers=httpheaders, cookies=httpcookie);
            else:
                geturls_text = httpx_pool.get(httpurl, headers=httpheaders, cookies=httpcookie);
        except httpx.ConnectTimeout:
            log.info("Error With URL "+httpurl);
            httpx_pool.close();
            return False;
        except httpx.ConnectError:
            log.info("Error With URL "+httpurl);
            httpx_pool.close();
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            httpx_pool.close();
            return False;
        httpcodeout = geturls_text.status_code;
        httpversionout = geturls_text.http_version;
        httpmethodout = httpmethod;
        httpurlout = str(geturls_text.url);
        httpheaderout = geturls_text.headers;
        httpheadersentout = httpheaders;
        if(isinstance(httpheaderout, list)):
            httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
        if(sys.version[0]=="2"):
            # Python 2's header object is not a plain dict; copy it.
            try:
                prehttpheaderout = httpheaderout;
                httpheaderkeys = httpheaderout.keys();
                imax = len(httpheaderkeys);
                ic = 0;
                httpheaderout = {};
                while(ic < imax):
                    httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
                    ic += 1;
            except AttributeError:
                pass;
        httpheaderout = fix_header_names(httpheaderout);
        if(isinstance(httpheadersentout, list)):
            httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
        httpheadersentout = fix_header_names(httpheadersentout);
        log.info("Downloading URL "+httpurl);
        httpcontentencoding = httpheaderout.get("Content-Encoding");
        if(httpcontentencoding=="gzip" or httpcontentencoding=="deflate"):
            # Transparently decompress gzip/deflate bodies.
            if(sys.version[0]=="2"):
                strbuf = StringIO(geturls_text.read());
            if(sys.version[0]>="3"):
                strbuf = BytesIO(geturls_text.read());
            gzstrbuf = gzip.GzipFile(fileobj=strbuf);
            returnval_content = gzstrbuf.read()[:];
        elif(httpcontentencoding=="br" and havebrotli):
            returnval_content = geturls_text.read()[:];
            returnval_content = brotli.decompress(returnval_content);
        else:
            # Also reached when the body is brotli-encoded but the brotli
            # module is missing; previously that path left returnval_content
            # unbound and crashed with NameError.
            returnval_content = geturls_text.read()[:];
        returnval = {'Type': "Content", 'Content': returnval_content, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
        geturls_text.close();
        httpx_pool.close();
        return returnval;
if(not havehttpx):
    def download_from_url_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        """Fallback used when httpx is not installed: delegate the whole
        request to the urllib implementation and return its result."""
        return download_from_url_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
if(havehttpx):
    def download_from_url_file_with_httpx(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        """Download httpurl with httpx (HTTP/1.1 only) into a uniquely named
        temporary file and return a dict describing the download (Filename,
        Filesize, Headers, Code, timing, ...). Returns False on connection
        errors or timeouts."""
        global geturls_download_sleep, tmpfileprefix, tmpfilesuffix;
        exec_time_start = time.time();
        # Hash url + buffersize + start time into a unique temp-file suffix.
        myhash = hashlib.new("sha1");
        if(sys.version[0]=="2"):
            myhash.update(httpurl);
            myhash.update(str(buffersize));
            myhash.update(str(exec_time_start));
        if(sys.version[0]>="3"):
            myhash.update(httpurl.encode('utf-8'));
            myhash.update(str(buffersize).encode('utf-8'));
            myhash.update(str(exec_time_start).encode('utf-8'));
        newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest());
        if(sleep<0):
            sleep = geturls_download_sleep;
        urlparts = urlparse.urlparse(httpurl);
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders);
        httpheaders = fix_header_names(httpheaders);
        # BUGFIX: when the header key was missing the original called .update()
        # on the httpuseragent/httpreferer *strings* (AttributeError); the
        # values now always land in httpheaders.
        if(httpuseragent is not None):
            httpheaders['User-Agent'] = httpuseragent;
        if(httpreferer is not None):
            httpheaders['Referer'] = httpreferer;
        if(urlparts.username is not None or urlparts.password is not None):
            # Credentials embedded in the URL become a Basic auth header.
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
            httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
        time.sleep(sleep);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            httpx_pool = httpx.Client(http1=True, http2=False, trust_env=True);
            if(httpmethod=="POST"):
                geturls_text = httpx_pool.post(httpurl, data=postdata, headers=httpheaders, cookies=httpcookie);
            else:
                # GET and any unrecognized method are sent as plain GETs.
                geturls_text = httpx_pool.get(httpurl, headers=httpheaders, cookies=httpcookie);
        except (httpx.ConnectTimeout, httpx.ConnectError, socket.timeout):
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.status_code;
        httpversionout = geturls_text.http_version;
        httpmethodout = httpmethod;
        httpurlout = str(geturls_text.url);
        httpheaderout = geturls_text.headers;
        httpheadersentout = httpheaders;
        if(isinstance(httpheaderout, list)):
            httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
        if(sys.version[0]=="2"):
            # Python 2: copy the header object into a plain dict.
            try:
                httpheaderout = dict((hkey, httpheaderout[hkey]) for hkey in list(httpheaderout.keys()));
            except AttributeError:
                pass;
        httpheaderout = fix_header_names(httpheaderout);
        if(isinstance(httpheadersentout, list)):
            httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
        httpheadersentout = fix_header_names(httpheadersentout);
        # BUGFIX: Content-Length may be absent; int(None) raised TypeError
        # before the None check could run.
        downloadsize = httpheaderout.get('Content-Length');
        if(downloadsize is not None):
            downloadsize = int(downloadsize);
        if downloadsize is None: downloadsize = 0;
        fulldatasize = 0;
        prevdownsize = 0;
        log.info("Downloading URL "+httpurl);
        with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
            tmpfilename = f.name;
            # Best effort: stamp the file with the server's Last-Modified time.
            try:
                os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple())));
            except AttributeError:
                try:
                    os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
                except ValueError:
                    pass;
            except ValueError:
                pass;
            returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
            # NOTE(review): iter_content is the requests API; current httpx
            # exposes iter_bytes()/iter_raw() — confirm against the pinned
            # httpx version before changing.
            for databytes in geturls_text.iter_content(chunk_size=buffersize):
                datasize = len(databytes);
                fulldatasize = datasize + fulldatasize;
                percentage = "";
                if(downloadsize>0):
                    percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                downloaddiff = fulldatasize - prevdownsize;
                log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                prevdownsize = fulldatasize;
                f.write(databytes);
        geturls_text.close();
        exec_time_end = time.time();
        # BUGFIX: elapsed time is end - start (was negative before).
        log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to download file.");
        returnval.update({'Filesize': os.path.getsize(tmpfilename), 'DownloadTime': float(exec_time_end - exec_time_start), 'DownloadTimeReadable': hms_string(exec_time_end - exec_time_start)});
        return returnval;
if(not havehttpx):
    def download_from_url_file_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        """Fallback used when httpx is not installed: delegate the download
        to the urllib implementation and return its result."""
        return download_from_url_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep);
if(havehttpx):
    def download_from_url_to_file_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        """Download httpurl with httpx and either move the result into
        outpath/outfile (returns a 'File' dict) or, when outfile is "-",
        return the content in memory (returns a 'Content' dict).
        Returns False on download failure or invalid output paths."""
        global geturls_download_sleep;
        if(sleep<0):
            sleep = geturls_download_sleep;
        if(not outfile=="-"):
            # Save to a real file: download to a temp file, then move into place.
            outpath = outpath.rstrip(os.path.sep);
            filepath = os.path.realpath(outpath+os.path.sep+outfile);
            if(not os.path.exists(outpath)):
                os.makedirs(outpath);
            if(os.path.exists(outpath) and os.path.isfile(outpath)):
                return False;
            if(os.path.exists(filepath) and os.path.isdir(filepath)):
                return False;
            pretmpfilename = download_from_url_file_with_httpx(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            log.info("Moving file "+tmpfilename+" to "+filepath);
            exec_time_start = time.time();
            shutil.move(tmpfilename, filepath);
            # Best effort: preserve the server's Last-Modified timestamp.
            try:
                os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())));
            except AttributeError:
                try:
                    os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
                except ValueError:
                    pass;
            except ValueError:
                pass;
            exec_time_end = time.time();
            # BUGFIX: elapsed time is end - start (was negative before).
            log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to move file.");
            if(os.path.exists(tmpfilename)):
                os.remove(tmpfilename);
            # BUGFIX: removed the duplicate 'Method' key from the dict literal.
            returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        if(outfile=="-"):
            # Return content in memory. The former Python 2 / Python 3 branches
            # only differed in the buffer type, so they are merged here.
            pretmpfilename = download_from_url_file_with_httpx(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            # BUGFIX: the Python 3 path previously skipped this failure check.
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            prevdownsize = 0;
            exec_time_start = time.time();
            with open(tmpfilename, 'rb') as ft:
                f = StringIO() if sys.version[0]=="2" else BytesIO();
                while True:
                    databytes = ft.read(buffersize[1]);
                    if not databytes: break;
                    datasize = len(databytes);
                    fulldatasize = datasize + fulldatasize;
                    percentage = "";
                    if(downloadsize>0):
                        percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                    downloaddiff = fulldatasize - prevdownsize;
                    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                    prevdownsize = fulldatasize;
                    f.write(databytes);
                f.seek(0);
                fdata = f.getvalue();
                f.close();
            os.remove(tmpfilename);
            exec_time_end = time.time();
            log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to copy file.");
            # BUGFIX: the Python 2 path previously set 'HeadersSent' to the
            # literal list ['HeadersSent']; duplicate 'Method' key removed too.
            returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        return returnval;
if(not havehttpx):
    def download_from_url_to_file_with_httpx(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        """Fallback used when httpx is not installed: delegate to the urllib
        implementation and return its result."""
        # NOTE(review): the positional order (buffersize before outfile/outpath)
        # mirrors the original call; confirm it matches the signature of
        # download_from_url_to_file_with_urllib.
        return download_from_url_to_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep);
if(havehttpx):
    def download_from_url_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        """Fetch httpurl with httpx (HTTP/1.1 and HTTP/2 enabled) and return a
        dict with the (decompressed) body, response headers, status code and
        related metadata, or False on connection errors/timeouts."""
        global geturls_download_sleep, havebrotli;
        if(sleep<0):
            sleep = geturls_download_sleep;
        urlparts = urlparse.urlparse(httpurl);
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders);
        httpheaders = fix_header_names(httpheaders);
        # BUGFIX: when the header key was missing the original called .update()
        # on the httpuseragent/httpreferer *strings* (AttributeError).
        if(httpuseragent is not None):
            httpheaders['User-Agent'] = httpuseragent;
        if(httpreferer is not None):
            httpheaders['Referer'] = httpreferer;
        if(urlparts.username is not None or urlparts.password is not None):
            # Credentials embedded in the URL become a Basic auth header.
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
            httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
        time.sleep(sleep);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            httpx_pool = httpx.Client(http1=True, http2=True, trust_env=True);
            if(httpmethod=="POST"):
                geturls_text = httpx_pool.post(httpurl, data=postdata, headers=httpheaders, cookies=httpcookie);
            else:
                # GET and any unrecognized method are sent as plain GETs.
                geturls_text = httpx_pool.get(httpurl, headers=httpheaders, cookies=httpcookie);
        except (httpx.ConnectTimeout, httpx.ConnectError, socket.timeout):
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.status_code;
        httpversionout = geturls_text.http_version;
        httpmethodout = httpmethod;
        httpurlout = str(geturls_text.url);
        httpheaderout = geturls_text.headers;
        httpheadersentout = httpheaders;
        if(isinstance(httpheaderout, list)):
            httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
        if(sys.version[0]=="2"):
            # Python 2: copy the header object into a plain dict.
            try:
                httpheaderout = dict((hkey, httpheaderout[hkey]) for hkey in list(httpheaderout.keys()));
            except AttributeError:
                pass;
        httpheaderout = fix_header_names(httpheaderout);
        if(isinstance(httpheadersentout, list)):
            httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
        httpheadersentout = fix_header_names(httpheadersentout);
        log.info("Downloading URL "+httpurl);
        returnval_content = geturls_text.read()[:];
        httpencoding = httpheaderout.get("Content-Encoding");
        if(httpencoding=="gzip" or httpencoding=="deflate"):
            if(sys.version[0]=="2"):
                strbuf = StringIO(returnval_content);
            if(sys.version[0]>="3"):
                strbuf = BytesIO(returnval_content);
            gzstrbuf = gzip.GzipFile(fileobj=strbuf);
            returnval_content = gzstrbuf.read()[:];
        if(httpencoding=="br" and havebrotli):
            returnval_content = brotli.decompress(returnval_content);
        # BUGFIX: when Content-Encoding is "br" but brotli is unavailable, the
        # original left returnval_content unbound (NameError); the raw bytes
        # are now returned instead.
        returnval = {'Type': "Content", 'Content': returnval_content, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
        geturls_text.close();
        return returnval;
if(not havehttpx):
    def download_from_url_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        """Fallback used when httpx is not installed: delegate the whole
        request to the urllib implementation and return its result."""
        return download_from_url_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
if(havehttpx):
    def download_from_url_file_with_httpx2(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        """Download httpurl with httpx (HTTP/1.1 and HTTP/2 enabled) into a
        uniquely named temporary file and return a dict describing the
        download (Filename, Filesize, Headers, Code, timing, ...).
        Returns False on connection errors or timeouts."""
        global geturls_download_sleep, tmpfileprefix, tmpfilesuffix;
        exec_time_start = time.time();
        # Hash url + buffersize + start time into a unique temp-file suffix.
        myhash = hashlib.new("sha1");
        if(sys.version[0]=="2"):
            myhash.update(httpurl);
            myhash.update(str(buffersize));
            myhash.update(str(exec_time_start));
        if(sys.version[0]>="3"):
            myhash.update(httpurl.encode('utf-8'));
            myhash.update(str(buffersize).encode('utf-8'));
            myhash.update(str(exec_time_start).encode('utf-8'));
        newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest());
        if(sleep<0):
            sleep = geturls_download_sleep;
        urlparts = urlparse.urlparse(httpurl);
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders);
        httpheaders = fix_header_names(httpheaders);
        # BUGFIX: when the header key was missing the original called .update()
        # on the httpuseragent/httpreferer *strings* (AttributeError).
        if(httpuseragent is not None):
            httpheaders['User-Agent'] = httpuseragent;
        if(httpreferer is not None):
            httpheaders['Referer'] = httpreferer;
        if(urlparts.username is not None or urlparts.password is not None):
            # Credentials embedded in the URL become a Basic auth header.
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
            httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
        time.sleep(sleep);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            httpx_pool = httpx.Client(http1=True, http2=True, trust_env=True);
            if(httpmethod=="POST"):
                geturls_text = httpx_pool.post(httpurl, data=postdata, headers=httpheaders, cookies=httpcookie);
            else:
                # GET and any unrecognized method are sent as plain GETs.
                geturls_text = httpx_pool.get(httpurl, headers=httpheaders, cookies=httpcookie);
        except (httpx.ConnectTimeout, httpx.ConnectError, socket.timeout):
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.status_code;
        httpversionout = geturls_text.http_version;
        httpmethodout = httpmethod;
        httpurlout = str(geturls_text.url);
        httpheaderout = geturls_text.headers;
        httpheadersentout = httpheaders;
        if(isinstance(httpheaderout, list)):
            httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
        if(sys.version[0]=="2"):
            # Python 2: copy the header object into a plain dict.
            try:
                httpheaderout = dict((hkey, httpheaderout[hkey]) for hkey in list(httpheaderout.keys()));
            except AttributeError:
                pass;
        httpheaderout = fix_header_names(httpheaderout);
        if(isinstance(httpheadersentout, list)):
            httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
        httpheadersentout = fix_header_names(httpheadersentout);
        # BUGFIX: Content-Length may be absent; int(None) raised TypeError
        # before the None check could run.
        downloadsize = httpheaderout.get('Content-Length');
        if(downloadsize is not None):
            downloadsize = int(downloadsize);
        if downloadsize is None: downloadsize = 0;
        fulldatasize = 0;
        prevdownsize = 0;
        log.info("Downloading URL "+httpurl);
        with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
            tmpfilename = f.name;
            # Best effort: stamp the file with the server's Last-Modified time.
            try:
                os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple())));
            except AttributeError:
                try:
                    os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
                except ValueError:
                    pass;
            except ValueError:
                pass;
            returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
            # NOTE(review): iter_content is the requests API; current httpx
            # exposes iter_bytes()/iter_raw() — confirm against the pinned
            # httpx version before changing.
            for databytes in geturls_text.iter_content(chunk_size=buffersize):
                datasize = len(databytes);
                fulldatasize = datasize + fulldatasize;
                percentage = "";
                if(downloadsize>0):
                    percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                downloaddiff = fulldatasize - prevdownsize;
                log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                prevdownsize = fulldatasize;
                f.write(databytes);
        geturls_text.close();
        exec_time_end = time.time();
        # BUGFIX: elapsed time is end - start (was negative before).
        log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to download file.");
        returnval.update({'Filesize': os.path.getsize(tmpfilename), 'DownloadTime': float(exec_time_end - exec_time_start), 'DownloadTimeReadable': hms_string(exec_time_end - exec_time_start)});
        return returnval;
if(not havehttpx):
    def download_from_url_file_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        """Fallback used when httpx is not installed: delegate the download
        to the urllib implementation and return its result."""
        return download_from_url_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep);
if(havehttpx):
    def download_from_url_to_file_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        """Download httpurl with httpx (HTTP/1.1 + HTTP/2) and either move the
        result into outpath/outfile (returns a 'File' dict) or, when outfile
        is "-", return the content in memory (returns a 'Content' dict).
        Returns False on download failure or invalid output paths."""
        global geturls_download_sleep;
        if(sleep<0):
            sleep = geturls_download_sleep;
        if(not outfile=="-"):
            # Save to a real file: download to a temp file, then move into place.
            outpath = outpath.rstrip(os.path.sep);
            filepath = os.path.realpath(outpath+os.path.sep+outfile);
            if(not os.path.exists(outpath)):
                os.makedirs(outpath);
            if(os.path.exists(outpath) and os.path.isfile(outpath)):
                return False;
            if(os.path.exists(filepath) and os.path.isdir(filepath)):
                return False;
            pretmpfilename = download_from_url_file_with_httpx2(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            log.info("Moving file "+tmpfilename+" to "+filepath);
            exec_time_start = time.time();
            shutil.move(tmpfilename, filepath);
            # Best effort: preserve the server's Last-Modified timestamp.
            try:
                os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())));
            except AttributeError:
                try:
                    os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
                except ValueError:
                    pass;
            except ValueError:
                pass;
            exec_time_end = time.time();
            # BUGFIX: elapsed time is end - start (was negative before).
            log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to move file.");
            if(os.path.exists(tmpfilename)):
                os.remove(tmpfilename);
            # BUGFIX: removed the duplicate 'Method' key from the dict literal.
            returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        if(outfile=="-"):
            # Return content in memory. The former Python 2 / Python 3 branches
            # only differed in the buffer type, so they are merged here.
            pretmpfilename = download_from_url_file_with_httpx2(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            # BUGFIX: the Python 3 path previously skipped this failure check.
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            prevdownsize = 0;
            exec_time_start = time.time();
            with open(tmpfilename, 'rb') as ft:
                f = StringIO() if sys.version[0]=="2" else BytesIO();
                while True:
                    databytes = ft.read(buffersize[1]);
                    if not databytes: break;
                    datasize = len(databytes);
                    fulldatasize = datasize + fulldatasize;
                    percentage = "";
                    if(downloadsize>0):
                        percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                    downloaddiff = fulldatasize - prevdownsize;
                    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                    prevdownsize = fulldatasize;
                    f.write(databytes);
                f.seek(0);
                fdata = f.getvalue();
                f.close();
            os.remove(tmpfilename);
            exec_time_end = time.time();
            log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to copy file.");
            # BUGFIX: the Python 2 path previously set 'HeadersSent' to the
            # literal list ['HeadersSent']; duplicate 'Method' key removed too.
            returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        return returnval;
if(not havehttpx):
    def download_from_url_to_file_with_httpx2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        """Fallback stub used when httpx is not installed: delegate the whole
        download-to-file operation to the urllib-based implementation."""
        return download_from_url_to_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep);
if(havehttpcore):
    def download_from_url_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        """Download httpurl with httpcore (HTTP/1.1 only) and return a dict
        holding the decoded body under 'Content' plus response metadata
        ('Headers', 'Version', 'Method', 'HeadersSent', 'URL', 'Code'), or
        False on connection errors."""
        global geturls_download_sleep, havebrotli;
        if(sleep<0):
            sleep = geturls_download_sleep;
        urlparts = urlparse.urlparse(httpurl);
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders);
        httpheaders = fix_header_names(httpheaders);
        if(httpuseragent is not None):
            # Fix: store the override in httpheaders (the old code mistakenly
            # called .update() on the httpuseragent string itself).
            httpheaders.update({'User-Agent': httpuseragent});
        if(httpreferer is not None):
            # Fix: same as above for the Referer override.
            httpheaders.update({'Referer': httpreferer});
        if(urlparts.username is not None or urlparts.password is not None):
            # Credentials embedded in the URL become an HTTP Basic auth header.
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
            httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
        time.sleep(sleep);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            httpx_pool = httpcore.ConnectionPool(http1=True, http2=False);
            if(httpmethod=="POST"):
                # Fix: actually issue a POST (the old code sent GET here).
                # NOTE(review): depending on the installed httpcore version the
                # body argument may be named 'content' rather than 'data' — confirm.
                geturls_text = httpx_pool.request("POST", httpurl, data=postdata, headers=httpheaders);
            else:
                # Fix: honor the requested method instead of hardcoding GET.
                geturls_text = httpx_pool.request(httpmethod, httpurl, headers=httpheaders);
        except httpcore.ConnectTimeout:
            log.info("Error With URL "+httpurl);
            return False;
        except httpcore.ConnectError:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.status;
        httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = str(httpurl);
        httpheaderout = geturls_text.headers;
        httpheadersentout = httpheaders;
        if(isinstance(httpheaderout, list)):
            httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
        if(sys.version[0]=="2"):
            # Python 2 header mappings may not behave like plain dicts; copy
            # them key by key into a real dict.
            try:
                prehttpheaderout = httpheaderout;
                httpheaderkeys = httpheaderout.keys();
                imax = len(httpheaderkeys);
                ic = 0;
                httpheaderout = {};
                while(ic < imax):
                    httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
                    ic += 1;
            except AttributeError:
                pass;
        httpheaderout = fix_header_names(httpheaderout);
        if(isinstance(httpheadersentout, list)):
            httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
        httpheadersentout = fix_header_names(httpheadersentout);
        log.info("Downloading URL "+httpurl);
        # Decode the body according to Content-Encoding (gzip/deflate/brotli).
        if(httpheaderout.get("Content-Encoding")=="gzip" or httpheaderout.get("Content-Encoding")=="deflate"):
            if(sys.version[0]=="2"):
                strbuf = StringIO(geturls_text.read());
            if(sys.version[0]>="3"):
                strbuf = BytesIO(geturls_text.read());
            gzstrbuf = gzip.GzipFile(fileobj=strbuf);
            returnval_content = gzstrbuf.read()[:];
        if(httpheaderout.get("Content-Encoding")!="gzip" and httpheaderout.get("Content-Encoding")!="deflate" and httpheaderout.get("Content-Encoding")!="br"):
            returnval_content = geturls_text.read()[:];
        if(httpheaderout.get("Content-Encoding")=="br" and havebrotli):
            returnval_content = geturls_text.read()[:];
            returnval_content = brotli.decompress(returnval_content);
        returnval = {'Type': "Content", 'Content': returnval_content, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
        geturls_text.close();
        return returnval;
if(not havehttpcore):
    def download_from_url_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        """Fallback stub used when httpcore is not installed: delegate the
        download to the urllib-based implementation."""
        return download_from_url_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
if(havehttpcore):
    def download_from_url_file_with_httpcore(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        """Download httpurl with httpcore (HTTP/1.1 only) into a uniquely
        named temporary file and return a dict describing it ('Filename',
        'Filesize', response metadata), or False on connection errors."""
        global geturls_download_sleep, tmpfileprefix, tmpfilesuffix;
        exec_time_start = time.time();
        # Derive a unique temp-file suffix from the URL, buffer size and time.
        myhash = hashlib.new("sha1");
        if(sys.version[0]=="2"):
            myhash.update(httpurl);
            myhash.update(str(buffersize));
            myhash.update(str(exec_time_start));
        if(sys.version[0]>="3"):
            myhash.update(httpurl.encode('utf-8'));
            myhash.update(str(buffersize).encode('utf-8'));
            myhash.update(str(exec_time_start).encode('utf-8'));
        newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest());
        if(sleep<0):
            sleep = geturls_download_sleep;
        urlparts = urlparse.urlparse(httpurl);
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders);
        httpheaders = fix_header_names(httpheaders);
        if(httpuseragent is not None):
            # Fix: store the override in httpheaders (the old code mistakenly
            # called .update() on the httpuseragent string itself).
            httpheaders.update({'User-Agent': httpuseragent});
        if(httpreferer is not None):
            # Fix: same as above for the Referer override.
            httpheaders.update({'Referer': httpreferer});
        if(urlparts.username is not None or urlparts.password is not None):
            # Credentials embedded in the URL become an HTTP Basic auth header.
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
            httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
        time.sleep(sleep);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            httpx_pool = httpcore.ConnectionPool(http1=True, http2=False);
            if(httpmethod=="POST"):
                # Fix: actually issue a POST (the old code sent GET here).
                # NOTE(review): depending on the installed httpcore version the
                # body argument may be named 'content' rather than 'data' — confirm.
                geturls_text = httpx_pool.request("POST", httpurl, data=postdata, headers=httpheaders);
            else:
                # Fix: honor the requested method instead of hardcoding GET.
                geturls_text = httpx_pool.request(httpmethod, httpurl, headers=httpheaders);
        except httpcore.ConnectTimeout:
            log.info("Error With URL "+httpurl);
            return False;
        except httpcore.ConnectError:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.status;
        httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = str(httpurl);
        httpheaderout = geturls_text.headers;
        httpheadersentout = httpheaders;
        if(isinstance(httpheaderout, list)):
            httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
        if(sys.version[0]=="2"):
            # Python 2 header mappings may not behave like plain dicts; copy
            # them key by key into a real dict.
            try:
                prehttpheaderout = httpheaderout;
                httpheaderkeys = httpheaderout.keys();
                imax = len(httpheaderkeys);
                ic = 0;
                httpheaderout = {};
                while(ic < imax):
                    httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
                    ic += 1;
            except AttributeError:
                pass;
        httpheaderout = fix_header_names(httpheaderout);
        if(isinstance(httpheadersentout, list)):
            httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
        httpheadersentout = fix_header_names(httpheadersentout);
        # Fix: int(None) raised TypeError when Content-Length was absent; read
        # the raw header first and only convert it when present.
        downloadsize = httpheaderout.get('Content-Length');
        if(downloadsize is not None):
            downloadsize = int(downloadsize);
        if downloadsize is None: downloadsize = 0;
        fulldatasize = 0;
        prevdownsize = 0;
        log.info("Downloading URL "+httpurl);
        with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
            tmpfilename = f.name;
            # Preserve the server's Last-Modified timestamp when parseable.
            try:
                os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple())));
            except AttributeError:
                try:
                    os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
                except ValueError:
                    pass;
            except ValueError:
                pass;
            returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
            # NOTE(review): httpcore responses may not expose iter_content();
            # this mirrors the original code — confirm against the installed
            # httpcore version.
            for databytes in geturls_text.iter_content(chunk_size=buffersize):
                datasize = len(databytes);
                fulldatasize = datasize + fulldatasize;
                percentage = "";
                if(downloadsize>0):
                    percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                downloaddiff = fulldatasize - prevdownsize;
                log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                prevdownsize = fulldatasize;
                f.write(databytes);
        geturls_text.close();
        exec_time_end = time.time();
        # NOTE(review): start - end is negative; hms_string/DownloadTime use
        # this convention file-wide, so it is preserved here — confirm intent.
        log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to download file.");
        returnval.update({'Filesize': os.path.getsize(tmpfilename), 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)});
        return returnval;
if(not havehttpcore):
    def download_from_url_file_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        """Fallback stub used when httpcore is not installed: delegate the
        download-to-temp-file operation to the urllib-based implementation."""
        return download_from_url_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep);
if(havehttpcore):
    def download_from_url_to_file_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        """Download httpurl via httpcore; when outfile names a file, move the
        download into outpath and return a 'File' dict, when outfile is "-"
        return the content in memory as a 'Content' dict. Returns False on
        failure."""
        global geturls_download_sleep;
        if(sleep<0):
            sleep = geturls_download_sleep;
        if(not outfile=="-"):
            # File mode: download to a temp file, then move it into place.
            # Fix: this whole branch (including the move) is now guarded by
            # outfile!="-"; previously the move ran unconditionally and raised
            # NameError on 'filepath' when outfile was "-".
            outpath = outpath.rstrip(os.path.sep);
            filepath = os.path.realpath(outpath+os.path.sep+outfile);
            if(not os.path.exists(outpath)):
                os.makedirs(outpath);
            if(os.path.exists(outpath) and os.path.isfile(outpath)):
                return False;
            if(os.path.exists(filepath) and os.path.isdir(filepath)):
                return False;
            pretmpfilename = download_from_url_file_with_httpcore(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            log.info("Moving file "+tmpfilename+" to "+filepath);
            exec_time_start = time.time();
            shutil.move(tmpfilename, filepath);
            # Preserve the server's Last-Modified timestamp when parseable.
            try:
                os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())));
            except AttributeError:
                try:
                    os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
                except ValueError:
                    pass;
            except ValueError:
                pass;
            exec_time_end = time.time();
            log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to move file.");
            if(os.path.exists(tmpfilename)):
                os.remove(tmpfilename);
            # Fix: single 'Method' key (the old dict listed it twice).
            returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string(exec_time_start - exec_time_end), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        if(outfile=="-"):
            # Stdout/content mode: copy the temp file into an in-memory
            # buffer (StringIO on Python 2, BytesIO on Python 3 — the only
            # difference between the former duplicated branches).
            pretmpfilename = download_from_url_file_with_httpcore(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            # Fix: the Python 3 branch previously lacked this failure guard.
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            prevdownsize = 0;
            exec_time_start = time.time();
            with open(tmpfilename, 'rb') as ft:
                f = StringIO() if sys.version[0]=="2" else BytesIO();
                while True:
                    databytes = ft.read(buffersize[1]);
                    if not databytes: break;
                    datasize = len(databytes);
                    fulldatasize = datasize + fulldatasize;
                    percentage = "";
                    if(downloadsize>0):
                        percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                    downloaddiff = fulldatasize - prevdownsize;
                    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                    prevdownsize = fulldatasize;
                    f.write(databytes);
                f.seek(0);
                fdata = f.getvalue();
                f.close();
            os.remove(tmpfilename);
            exec_time_end = time.time();
            log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to copy file.");
            # Fix: 'HeadersSent' now forwards the real sent headers (the old
            # Python 2 branch stored the literal list ['HeadersSent']), and
            # the duplicate 'Method' key is gone.
            returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string(exec_time_start - exec_time_end), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        return returnval;
if(not havehttpcore):
    def download_from_url_to_file_with_httpcore(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        """Fallback stub used when httpcore is not installed: delegate the
        whole download-to-file operation to the urllib-based implementation."""
        return download_from_url_to_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep);
if(havehttpcore):
    def download_from_url_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        """Download httpurl with httpcore (HTTP/1.1 and HTTP/2 enabled) and
        return a dict holding the decoded body under 'Content' plus response
        metadata, or False on connection errors."""
        global geturls_download_sleep, havebrotli;
        if(sleep<0):
            sleep = geturls_download_sleep;
        urlparts = urlparse.urlparse(httpurl);
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders);
        httpheaders = fix_header_names(httpheaders);
        if(httpuseragent is not None):
            # Fix: store the override in httpheaders (the old code mistakenly
            # called .update() on the httpuseragent string itself).
            httpheaders.update({'User-Agent': httpuseragent});
        if(httpreferer is not None):
            # Fix: same as above for the Referer override.
            httpheaders.update({'Referer': httpreferer});
        if(urlparts.username is not None or urlparts.password is not None):
            # Credentials embedded in the URL become an HTTP Basic auth header.
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
            httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
        time.sleep(sleep);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            httpx_pool = httpcore.ConnectionPool(http1=True, http2=True);
            if(httpmethod=="POST"):
                # Fix: actually issue a POST (the old code sent GET here).
                # NOTE(review): depending on the installed httpcore version the
                # body argument may be named 'content' rather than 'data' — confirm.
                geturls_text = httpx_pool.request("POST", httpurl, data=postdata, headers=httpheaders);
            else:
                # Fix: honor the requested method instead of hardcoding GET.
                geturls_text = httpx_pool.request(httpmethod, httpurl, headers=httpheaders);
        except httpcore.ConnectTimeout:
            log.info("Error With URL "+httpurl);
            return False;
        except httpcore.ConnectError:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.status;
        httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = str(httpurl);
        httpheaderout = geturls_text.headers;
        httpheadersentout = httpheaders;
        if(isinstance(httpheaderout, list)):
            httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
        if(sys.version[0]=="2"):
            # Python 2 header mappings may not behave like plain dicts; copy
            # them key by key into a real dict.
            try:
                prehttpheaderout = httpheaderout;
                httpheaderkeys = httpheaderout.keys();
                imax = len(httpheaderkeys);
                ic = 0;
                httpheaderout = {};
                while(ic < imax):
                    httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
                    ic += 1;
            except AttributeError:
                pass;
        httpheaderout = fix_header_names(httpheaderout);
        if(isinstance(httpheadersentout, list)):
            httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
        httpheadersentout = fix_header_names(httpheadersentout);
        log.info("Downloading URL "+httpurl);
        # Decode the body according to Content-Encoding (gzip/deflate/brotli).
        if(httpheaderout.get("Content-Encoding")=="gzip" or httpheaderout.get("Content-Encoding")=="deflate"):
            if(sys.version[0]=="2"):
                strbuf = StringIO(geturls_text.read());
            if(sys.version[0]>="3"):
                strbuf = BytesIO(geturls_text.read());
            gzstrbuf = gzip.GzipFile(fileobj=strbuf);
            returnval_content = gzstrbuf.read()[:];
        if(httpheaderout.get("Content-Encoding")!="gzip" and httpheaderout.get("Content-Encoding")!="deflate" and httpheaderout.get("Content-Encoding")!="br"):
            returnval_content = geturls_text.read()[:];
        if(httpheaderout.get("Content-Encoding")=="br" and havebrotli):
            returnval_content = geturls_text.read()[:];
            returnval_content = brotli.decompress(returnval_content);
        returnval = {'Type': "Content", 'Content': returnval_content, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
        geturls_text.close();
        return returnval;
if(not havehttpcore):
    def download_from_url_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        """Fallback stub used when httpcore is not installed: delegate the
        download to the urllib-based implementation."""
        return download_from_url_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
if(havehttpcore):
    def download_from_url_file_with_httpcore2(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        """Download httpurl with httpcore (HTTP/1.1 and HTTP/2 enabled) into a
        uniquely named temporary file and return a dict describing it
        ('Filename', 'Filesize', response metadata), or False on connection
        errors."""
        global geturls_download_sleep, tmpfileprefix, tmpfilesuffix;
        exec_time_start = time.time();
        # Derive a unique temp-file suffix from the URL, buffer size and time.
        myhash = hashlib.new("sha1");
        if(sys.version[0]=="2"):
            myhash.update(httpurl);
            myhash.update(str(buffersize));
            myhash.update(str(exec_time_start));
        if(sys.version[0]>="3"):
            myhash.update(httpurl.encode('utf-8'));
            myhash.update(str(buffersize).encode('utf-8'));
            myhash.update(str(exec_time_start).encode('utf-8'));
        newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest());
        if(sleep<0):
            sleep = geturls_download_sleep;
        urlparts = urlparse.urlparse(httpurl);
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders);
        httpheaders = fix_header_names(httpheaders);
        if(httpuseragent is not None):
            # Fix: store the override in httpheaders (the old code mistakenly
            # called .update() on the httpuseragent string itself).
            httpheaders.update({'User-Agent': httpuseragent});
        if(httpreferer is not None):
            # Fix: same as above for the Referer override.
            httpheaders.update({'Referer': httpreferer});
        if(urlparts.username is not None or urlparts.password is not None):
            # Credentials embedded in the URL become an HTTP Basic auth header.
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
            httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
        time.sleep(sleep);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            httpx_pool = httpcore.ConnectionPool(http1=True, http2=True);
            if(httpmethod=="POST"):
                # Fix: actually issue a POST (the old code sent GET here).
                # NOTE(review): depending on the installed httpcore version the
                # body argument may be named 'content' rather than 'data' — confirm.
                geturls_text = httpx_pool.request("POST", httpurl, data=postdata, headers=httpheaders);
            else:
                # Fix: honor the requested method instead of hardcoding GET.
                geturls_text = httpx_pool.request(httpmethod, httpurl, headers=httpheaders);
        except httpcore.ConnectTimeout:
            log.info("Error With URL "+httpurl);
            return False;
        except httpcore.ConnectError:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.status;
        httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = str(httpurl);
        httpheaderout = geturls_text.headers;
        httpheadersentout = httpheaders;
        if(isinstance(httpheaderout, list)):
            httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
        if(sys.version[0]=="2"):
            # Python 2 header mappings may not behave like plain dicts; copy
            # them key by key into a real dict.
            try:
                prehttpheaderout = httpheaderout;
                httpheaderkeys = httpheaderout.keys();
                imax = len(httpheaderkeys);
                ic = 0;
                httpheaderout = {};
                while(ic < imax):
                    httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
                    ic += 1;
            except AttributeError:
                pass;
        httpheaderout = fix_header_names(httpheaderout);
        if(isinstance(httpheadersentout, list)):
            httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
        httpheadersentout = fix_header_names(httpheadersentout);
        # Fix: int(None) raised TypeError when Content-Length was absent; read
        # the raw header first and only convert it when present.
        downloadsize = httpheaderout.get('Content-Length');
        if(downloadsize is not None):
            downloadsize = int(downloadsize);
        if downloadsize is None: downloadsize = 0;
        fulldatasize = 0;
        prevdownsize = 0;
        log.info("Downloading URL "+httpurl);
        with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
            tmpfilename = f.name;
            # Preserve the server's Last-Modified timestamp when parseable.
            try:
                os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple())));
            except AttributeError:
                try:
                    os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
                except ValueError:
                    pass;
            except ValueError:
                pass;
            returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
            # NOTE(review): httpcore responses may not expose iter_content();
            # this mirrors the original code — confirm against the installed
            # httpcore version.
            for databytes in geturls_text.iter_content(chunk_size=buffersize):
                datasize = len(databytes);
                fulldatasize = datasize + fulldatasize;
                percentage = "";
                if(downloadsize>0):
                    percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                downloaddiff = fulldatasize - prevdownsize;
                log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                prevdownsize = fulldatasize;
                f.write(databytes);
        geturls_text.close();
        exec_time_end = time.time();
        # NOTE(review): start - end is negative; hms_string/DownloadTime use
        # this convention file-wide, so it is preserved here — confirm intent.
        log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to download file.");
        returnval.update({'Filesize': os.path.getsize(tmpfilename), 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)});
        return returnval;
if(not havehttpcore):
    def download_from_url_file_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        """Fallback stub used when httpcore is not installed: delegate the
        download-to-temp-file operation to the urllib-based implementation."""
        return download_from_url_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep);
if(havehttpcore):
    def download_from_url_to_file_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        """Download httpurl via the httpcore backend.

        When outfile is a real name the payload is downloaded to a temp file
        and moved to outpath/outfile; when outfile is "-" the payload is
        returned in memory.  Returns a metadata dict, or False on failure.
        """
        global geturls_download_sleep;
        if(sleep<0):
            sleep = geturls_download_sleep;
        if(not outfile=="-"):
            # File mode: download to a temp file, then move it into place.
            outpath = outpath.rstrip(os.path.sep);
            filepath = os.path.realpath(outpath+os.path.sep+outfile);
            if(not os.path.exists(outpath)):
                os.makedirs(outpath);
            if(os.path.exists(outpath) and os.path.isfile(outpath)):
                return False;
            if(os.path.exists(filepath) and os.path.isdir(filepath)):
                return False;
            pretmpfilename = download_from_url_file_with_httpcore2(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            log.info("Moving file "+tmpfilename+" to "+filepath);
            exec_time_start = time.time();
            shutil.move(tmpfilename, filepath);
            # Best effort: carry the server's Last-Modified stamp to the file.
            try:
                os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())));
            except AttributeError:
                try:
                    os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
                except ValueError:
                    pass;
            except ValueError:
                pass;
            exec_time_end = time.time();
            # BUGFIX: elapsed time was computed as start-end (always negative).
            log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to move file.");
            if(os.path.exists(tmpfilename)):
                os.remove(tmpfilename);
            # BUGFIX: removed the duplicate 'Method' key (the later httpmethod
            # value silently won anyway, so behavior for readers is unchanged).
            returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        if(outfile=="-" and sys.version[0]=="2"):
            # In-memory mode (Python 2): copy the temp file into a StringIO.
            pretmpfilename = download_from_url_file_with_httpcore2(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            prevdownsize = 0;
            exec_time_start = time.time();
            with open(tmpfilename, 'rb') as ft:
                f = StringIO();
                while True:
                    databytes = ft.read(buffersize[1]);
                    if not databytes: break;
                    datasize = len(databytes);
                    fulldatasize = datasize + fulldatasize;
                    percentage = "";
                    if(downloadsize>0):
                        percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                    downloaddiff = fulldatasize - prevdownsize;
                    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                    prevdownsize = fulldatasize;
                    f.write(databytes);
                f.seek(0);
                fdata = f.getvalue();
                f.close();
                ft.close();
                os.remove(tmpfilename);
                exec_time_end = time.time();
                log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to copy file.");
            # BUGFIX: 'HeadersSent' was the literal list ['HeadersSent'] instead
            # of the headers actually sent; also dropped the duplicate 'Method'.
            returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        if(outfile=="-" and sys.version[0]>="3"):
            # In-memory mode (Python 3): copy the temp file into a BytesIO.
            pretmpfilename = download_from_url_file_with_httpcore2(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            # BUGFIX: the Python 3 branch did not check for a failed download
            # (the Python 2 branch did), crashing on subscripting False.
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            prevdownsize = 0;
            exec_time_start = time.time();
            with open(tmpfilename, 'rb') as ft:
                f = BytesIO();
                while True:
                    databytes = ft.read(buffersize[1]);
                    if not databytes: break;
                    datasize = len(databytes);
                    fulldatasize = datasize + fulldatasize;
                    percentage = "";
                    if(downloadsize>0):
                        percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                    downloaddiff = fulldatasize - prevdownsize;
                    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                    prevdownsize = fulldatasize;
                    f.write(databytes);
                f.seek(0);
                fdata = f.getvalue();
                f.close();
                ft.close();
                os.remove(tmpfilename);
                exec_time_end = time.time();
                log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to copy file.");
            returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        return returnval;
if(not havehttpcore):
    # BUGFIX: this fallback previously tested havehttpx, so when httpx was
    # missing but httpcore was present it overwrote the real httpcore2
    # definition above with the urllib fallback.
    def download_from_url_to_file_with_httpcore2(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        """Fallback used when httpcore is not installed: delegate to the
        plain-urllib implementation with identical arguments."""
        return download_from_url_to_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep);
if(haveurllib3):
    def download_from_url_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        """Download httpurl with urllib3's request() API and return the body
        plus response metadata as a dict, or False on a connection failure."""
        global geturls_download_sleep, havebrotli;
        if(sleep<0):
            sleep = geturls_download_sleep;
        urlparts = urlparse.urlparse(httpurl);
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders);
        httpheaders = fix_header_names(httpheaders);
        # BUGFIX: the else branches below previously called .update() on the
        # httpuseragent/httpreferer strings instead of the header dict, which
        # raised AttributeError and never set the header.
        if(httpuseragent is not None):
            if('User-Agent' in httpheaders):
                httpheaders['User-Agent'] = httpuseragent;
            else:
                httpheaders.update({'User-Agent': httpuseragent});
        if(httpreferer is not None):
            if('Referer' in httpheaders):
                httpheaders['Referer'] = httpreferer;
            else:
                httpheaders.update({'Referer': httpreferer});
        if(urlparts.username is not None or urlparts.password is not None):
            # Inline URL credentials become an HTTP Basic Authorization header.
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
            httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
        time.sleep(sleep);
        urllib_pool = urllib3.PoolManager(headers=httpheaders);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            if(httpmethod=="GET"):
                geturls_text = urllib_pool.request("GET", httpurl, headers=httpheaders, preload_content=False);
            elif(httpmethod=="POST"):
                geturls_text = urllib_pool.request("POST", httpurl, body=postdata, headers=httpheaders, preload_content=False);
            else:
                # Unknown methods fall back to GET, matching the other backends.
                geturls_text = urllib_pool.request("GET", httpurl, headers=httpheaders, preload_content=False);
        except urllib3.exceptions.ConnectTimeoutError:
            log.info("Error With URL "+httpurl);
            return False;
        except urllib3.exceptions.ConnectError:
            log.info("Error With URL "+httpurl);
            return False;
        except urllib3.exceptions.MaxRetryError:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.status;
        httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = geturls_text.geturl();
        httpheaderout = geturls_text.info();
        httpheadersentout = httpheaders;
        if(isinstance(httpheaderout, list)):
            httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
        if(sys.version[0]=="2"):
            # Python 2 header objects are not plain dicts; copy them key by key.
            try:
                prehttpheaderout = httpheaderout;
                httpheaderkeys = httpheaderout.keys();
                imax = len(httpheaderkeys);
                ic = 0;
                httpheaderout = {};
                while(ic < imax):
                    httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
                    ic += 1;
            except AttributeError:
                pass;
        httpheaderout = fix_header_names(httpheaderout);
        if(isinstance(httpheadersentout, list)):
            httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
        httpheadersentout = fix_header_names(httpheadersentout);
        log.info("Downloading URL "+httpurl);
        if(httpheaderout.get("Content-Encoding")=="gzip" or httpheaderout.get("Content-Encoding")=="deflate"):
            if(sys.version[0]=="2"):
                strbuf = StringIO(geturls_text.read());
            if(sys.version[0]>="3"):
                strbuf = BytesIO(geturls_text.read());
            gzstrbuf = gzip.GzipFile(fileobj=strbuf);
            returnval_content = gzstrbuf.read()[:];
        if(httpheaderout.get("Content-Encoding")!="gzip" and httpheaderout.get("Content-Encoding")!="deflate" and httpheaderout.get("Content-Encoding")!="br"):
            returnval_content = geturls_text.read()[:];
        if(httpheaderout.get("Content-Encoding")=="br" and havebrotli):
            returnval_content = geturls_text.read()[:];
            returnval_content = brotli.decompress(returnval_content);
        returnval = {'Type': "Content", 'Content': returnval_content, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
        geturls_text.close();
        return returnval;
if(not haveurllib3):
    def download_from_url_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        """Fallback used when urllib3 is not installed: delegate the whole
        download to the plain-urllib implementation with identical arguments."""
        return download_from_url_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
if(haveurllib3):
    def download_from_url_file_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        """Download httpurl with urllib3's request() API into a uniquely named
        temporary file and return its metadata dict, or False on failure."""
        global geturls_download_sleep, tmpfileprefix, tmpfilesuffix;
        exec_time_start = time.time();
        # Derive a unique temp-file suffix from the URL, buffer size and time.
        myhash = hashlib.new("sha1");
        if(sys.version[0]=="2"):
            myhash.update(httpurl);
            myhash.update(str(buffersize));
            myhash.update(str(exec_time_start));
        if(sys.version[0]>="3"):
            myhash.update(httpurl.encode('utf-8'));
            myhash.update(str(buffersize).encode('utf-8'));
            myhash.update(str(exec_time_start).encode('utf-8'));
        newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest());
        if(sleep<0):
            sleep = geturls_download_sleep;
        urlparts = urlparse.urlparse(httpurl);
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders);
        httpheaders = fix_header_names(httpheaders);
        # BUGFIX: the else branches below previously called .update() on the
        # httpuseragent/httpreferer strings instead of the header dict.
        if(httpuseragent is not None):
            if('User-Agent' in httpheaders):
                httpheaders['User-Agent'] = httpuseragent;
            else:
                httpheaders.update({'User-Agent': httpuseragent});
        if(httpreferer is not None):
            if('Referer' in httpheaders):
                httpheaders['Referer'] = httpreferer;
            else:
                httpheaders.update({'Referer': httpreferer});
        if(urlparts.username is not None or urlparts.password is not None):
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
            httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
        time.sleep(sleep);
        urllib_pool = urllib3.PoolManager(headers=httpheaders);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            if(httpmethod=="GET"):
                geturls_text = urllib_pool.request("GET", httpurl, headers=httpheaders, preload_content=False);
            elif(httpmethod=="POST"):
                geturls_text = urllib_pool.request("POST", httpurl, body=postdata, headers=httpheaders, preload_content=False);
            else:
                geturls_text = urllib_pool.request("GET", httpurl, headers=httpheaders, preload_content=False);
        except urllib3.exceptions.ConnectTimeoutError:
            log.info("Error With URL "+httpurl);
            return False;
        except urllib3.exceptions.ConnectError:
            log.info("Error With URL "+httpurl);
            return False;
        except urllib3.exceptions.MaxRetryError:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.status;
        httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = geturls_text.geturl();
        httpheaderout = geturls_text.info();
        httpheadersentout = httpheaders;
        if(isinstance(httpheaderout, list)):
            httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
        if(sys.version[0]=="2"):
            # Python 2 header objects are not plain dicts; copy them key by key.
            try:
                prehttpheaderout = httpheaderout;
                httpheaderkeys = httpheaderout.keys();
                imax = len(httpheaderkeys);
                ic = 0;
                httpheaderout = {};
                while(ic < imax):
                    httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
                    ic += 1;
            except AttributeError:
                pass;
        httpheaderout = fix_header_names(httpheaderout);
        if(isinstance(httpheadersentout, list)):
            httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
        httpheadersentout = fix_header_names(httpheadersentout);
        # BUGFIX: int() was applied before the None check, so a response with
        # no Content-Length header raised TypeError instead of defaulting to 0.
        downloadsize = geturls_text.headers.get('Content-Length');
        if(downloadsize is not None):
            downloadsize = int(downloadsize);
        if downloadsize is None: downloadsize = 0;
        fulldatasize = 0;
        prevdownsize = 0;
        log.info("Downloading URL "+httpurl);
        with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
            tmpfilename = f.name;
            # Best effort: stamp the file with the server's Last-Modified time.
            try:
                os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple())));
            except AttributeError:
                try:
                    os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
                except ValueError:
                    pass;
            except ValueError:
                pass;
            returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
            while True:
                databytes = geturls_text.read(buffersize);
                if not databytes: break;
                datasize = len(databytes);
                fulldatasize = datasize + fulldatasize;
                percentage = "";
                if(downloadsize>0):
                    percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                downloaddiff = fulldatasize - prevdownsize;
                log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                prevdownsize = fulldatasize;
                f.write(databytes);
            f.close();
        geturls_text.close();
        exec_time_end = time.time();
        # BUGFIX: elapsed time was computed as start-end (always negative).
        log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to download file.");
        returnval.update({'Filesize': os.path.getsize(tmpfilename), 'DownloadTime': float(exec_time_end - exec_time_start), 'DownloadTimeReadable': hms_string(exec_time_end - exec_time_start)});
        return returnval;
if(not haveurllib3):
    def download_from_url_file_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        """Fallback used when urllib3 is not installed: delegate the whole
        download to the plain-urllib implementation with identical arguments."""
        return download_from_url_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep);
if(haveurllib3):
    def download_from_url_to_file_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        """Download httpurl via urllib3's request() API.

        When outfile is a real name the payload is downloaded to a temp file
        and moved to outpath/outfile; when outfile is "-" the payload is
        returned in memory.  Returns a metadata dict, or False on failure.
        """
        global geturls_download_sleep;
        if(sleep<0):
            sleep = geturls_download_sleep;
        if(not outfile=="-"):
            # File mode: download to a temp file, then move it into place.
            outpath = outpath.rstrip(os.path.sep);
            filepath = os.path.realpath(outpath+os.path.sep+outfile);
            if(not os.path.exists(outpath)):
                os.makedirs(outpath);
            if(os.path.exists(outpath) and os.path.isfile(outpath)):
                return False;
            if(os.path.exists(filepath) and os.path.isdir(filepath)):
                return False;
            pretmpfilename = download_from_url_file_with_request3(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            log.info("Moving file "+tmpfilename+" to "+filepath);
            exec_time_start = time.time();
            shutil.move(tmpfilename, filepath);
            # Best effort: carry the server's Last-Modified stamp to the file.
            try:
                os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())));
            except AttributeError:
                try:
                    os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
                except ValueError:
                    pass;
            except ValueError:
                pass;
            exec_time_end = time.time();
            # BUGFIX: elapsed time was computed as start-end (always negative).
            log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to move file.");
            if(os.path.exists(tmpfilename)):
                os.remove(tmpfilename);
            # BUGFIX: removed the duplicate 'Method' key (the later httpmethod
            # value silently won anyway, so behavior for readers is unchanged).
            returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        if(outfile=="-" and sys.version[0]=="2"):
            # In-memory mode (Python 2): copy the temp file into a StringIO.
            pretmpfilename = download_from_url_file_with_request3(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            prevdownsize = 0;
            exec_time_start = time.time();
            with open(tmpfilename, 'rb') as ft:
                f = StringIO();
                while True:
                    databytes = ft.read(buffersize[1]);
                    if not databytes: break;
                    datasize = len(databytes);
                    fulldatasize = datasize + fulldatasize;
                    percentage = "";
                    if(downloadsize>0):
                        percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                    downloaddiff = fulldatasize - prevdownsize;
                    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                    prevdownsize = fulldatasize;
                    f.write(databytes);
                f.seek(0);
                fdata = f.getvalue();
                f.close();
                ft.close();
                os.remove(tmpfilename);
                exec_time_end = time.time();
                log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to copy file.");
            returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        if(outfile=="-" and sys.version[0]>="3"):
            # In-memory mode (Python 3): copy the temp file into a BytesIO.
            pretmpfilename = download_from_url_file_with_request3(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            # BUGFIX: the Python 3 branch did not check for a failed download
            # (the Python 2 branch did), crashing on subscripting False.
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            prevdownsize = 0;
            exec_time_start = time.time();
            with open(tmpfilename, 'rb') as ft:
                f = BytesIO();
                while True:
                    databytes = ft.read(buffersize[1]);
                    if not databytes: break;
                    datasize = len(databytes);
                    fulldatasize = datasize + fulldatasize;
                    percentage = "";
                    if(downloadsize>0):
                        percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                    downloaddiff = fulldatasize - prevdownsize;
                    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                    prevdownsize = fulldatasize;
                    f.write(databytes);
                f.seek(0);
                fdata = f.getvalue();
                f.close();
                ft.close();
                os.remove(tmpfilename);
                exec_time_end = time.time();
                log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to copy file.");
            returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        return returnval;
if(not haveurllib3):
    def download_from_url_to_file_with_request3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        """Fallback used when urllib3 is not installed: delegate the whole
        download to the plain-urllib implementation with identical arguments."""
        return download_from_url_to_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, outfile, outpath, sleep);
if(haveurllib3):
    def download_from_url_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        """Download httpurl with urllib3's urlopen() API and return the body
        plus response metadata as a dict, or False on a connection failure."""
        global geturls_download_sleep, havebrotli;
        if(sleep<0):
            sleep = geturls_download_sleep;
        urlparts = urlparse.urlparse(httpurl);
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders);
        httpheaders = fix_header_names(httpheaders);
        # BUGFIX: the else branches below previously called .update() on the
        # httpuseragent/httpreferer strings instead of the header dict.
        if(httpuseragent is not None):
            if('User-Agent' in httpheaders):
                httpheaders['User-Agent'] = httpuseragent;
            else:
                httpheaders.update({'User-Agent': httpuseragent});
        if(httpreferer is not None):
            if('Referer' in httpheaders):
                httpheaders['Referer'] = httpreferer;
            else:
                httpheaders.update({'Referer': httpreferer});
        if(urlparts.username is not None or urlparts.password is not None):
            # Inline URL credentials become an HTTP Basic Authorization header.
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
            httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
        time.sleep(sleep);
        urllib_pool = urllib3.PoolManager(headers=httpheaders);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            if(httpmethod=="GET"):
                geturls_text = urllib_pool.urlopen("GET", httpurl, headers=httpheaders, preload_content=False);
            elif(httpmethod=="POST"):
                # BUGFIX: the POST branch previously issued a GET verb.
                geturls_text = urllib_pool.urlopen("POST", httpurl, body=postdata, headers=httpheaders, preload_content=False);
            else:
                # Unknown methods fall back to GET, matching the other backends.
                geturls_text = urllib_pool.urlopen("GET", httpurl, headers=httpheaders, preload_content=False);
        except urllib3.exceptions.ConnectTimeoutError:
            log.info("Error With URL "+httpurl);
            return False;
        except urllib3.exceptions.ConnectError:
            log.info("Error With URL "+httpurl);
            return False;
        except urllib3.exceptions.MaxRetryError:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.status;
        httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = geturls_text.geturl();
        httpheaderout = geturls_text.info();
        httpheadersentout = httpheaders;
        if(isinstance(httpheaderout, list)):
            httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
        if(sys.version[0]=="2"):
            # Python 2 header objects are not plain dicts; copy them key by key.
            try:
                prehttpheaderout = httpheaderout;
                httpheaderkeys = httpheaderout.keys();
                imax = len(httpheaderkeys);
                ic = 0;
                httpheaderout = {};
                while(ic < imax):
                    httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
                    ic += 1;
            except AttributeError:
                pass;
        httpheaderout = fix_header_names(httpheaderout);
        if(isinstance(httpheadersentout, list)):
            httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
        httpheadersentout = fix_header_names(httpheadersentout);
        log.info("Downloading URL "+httpurl);
        if(httpheaderout.get("Content-Encoding")=="gzip" or httpheaderout.get("Content-Encoding")=="deflate"):
            if(sys.version[0]=="2"):
                strbuf = StringIO(geturls_text.read());
            if(sys.version[0]>="3"):
                strbuf = BytesIO(geturls_text.read());
            gzstrbuf = gzip.GzipFile(fileobj=strbuf);
            returnval_content = gzstrbuf.read()[:];
        if(httpheaderout.get("Content-Encoding")!="gzip" and httpheaderout.get("Content-Encoding")!="deflate" and httpheaderout.get("Content-Encoding")!="br"):
            returnval_content = geturls_text.read()[:];
        if(httpheaderout.get("Content-Encoding")=="br" and havebrotli):
            returnval_content = geturls_text.read()[:];
            returnval_content = brotli.decompress(returnval_content);
        returnval = {'Type': "Content", 'Content': returnval_content, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
        geturls_text.close();
        return returnval;
if(not haveurllib3):
    def download_from_url_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        """Fallback used when urllib3 is not installed: delegate the whole
        download to the plain-urllib implementation with identical arguments."""
        return download_from_url_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
3870 if(haveurllib3):
3871 def download_from_url_file_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
3872 global geturls_download_sleep, tmpfileprefix, tmpfilesuffix;
3873 exec_time_start = time.time();
3874 myhash = hashlib.new("sha1");
3875 if(sys.version[0]=="2"):
3876 myhash.update(httpurl);
3877 myhash.update(str(buffersize));
3878 myhash.update(str(exec_time_start));
3879 if(sys.version[0]>="3"):
3880 myhash.update(httpurl.encode('utf-8'));
3881 myhash.update(str(buffersize).encode('utf-8'));
3882 myhash.update(str(exec_time_start).encode('utf-8'));
3883 newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest());
3884 if(sleep<0):
3885 sleep = geturls_download_sleep;
3886 urlparts = urlparse.urlparse(httpurl);
3887 if(isinstance(httpheaders, list)):
3888 httpheaders = make_http_headers_from_list_to_dict(httpheaders);
3889 httpheaders = fix_header_names(httpheaders);
3890 if(httpuseragent is not None):
3891 if('User-Agent' in httpheaders):
3892 httpheaders['User-Agent'] = httpuseragent;
3893 else:
3894 httpuseragent.update({'User-Agent': httpuseragent});
3895 if(httpreferer is not None):
3896 if('Referer' in httpheaders):
3897 httpheaders['Referer'] = httpreferer;
3898 else:
3899 httpuseragent.update({'Referer': httpreferer});
3900 if(urlparts.username is not None or urlparts.password is not None):
3901 inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
3902 httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
3903 time.sleep(sleep);
3904 urllib_pool = urllib3.PoolManager(headers=httpheaders);
3905 if(postdata is not None and not isinstance(postdata, dict)):
3906 postdata = urlencode(postdata);
3907 try:
3908 if(httpmethod=="GET"):
3909 geturls_text = urllib_pool.urlopen("GET", httpurl, headers=httpheaders, preload_content=False);
3910 elif(httpmethod=="POST"):
3911 geturls_text = urllib_pool.urlopen("GET", httpurl, body=postdata, headers=httpheaders, preload_content=False);
3912 else:
3913 geturls_text = urllib_pool.urlopen("GET", httpurl, headers=httpheaders, preload_content=False);
3914 except urllib3.exceptions.ConnectTimeoutError:
3915 log.info("Error With URL "+httpurl);
3916 return False;
3917 except urllib3.exceptions.ConnectError:
3918 log.info("Error With URL "+httpurl);
3919 return False;
3920 except urllib3.exceptions.MaxRetryError:
3921 log.info("Error With URL "+httpurl);
3922 return False;
3923 except socket.timeout:
3924 log.info("Error With URL "+httpurl);
3925 return False;
3926 httpcodeout = geturls_text.status;
3927 httpversionout = "1.1";
3928 httpmethodout = httpmethod;
3929 httpurlout = geturls_text.geturl();
3930 httpheaderout = geturls_text.info();
3931 httpheadersentout = httpheaders;
3932 if(isinstance(httpheaderout, list)):
3933 httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
3934 if(sys.version[0]=="2"):
3935 try:
3936 prehttpheaderout = httpheaderout;
3937 httpheaderkeys = httpheaderout.keys();
3938 imax = len(httpheaderkeys);
3939 ic = 0;
3940 httpheaderout = {};
3941 while(ic < imax):
3942 httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
3943 ic += 1;
3944 except AttributeError:
3945 pass;
3946 httpheaderout = fix_header_names(httpheaderout);
3947 if(isinstance(httpheadersentout, list)):
3948 httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
3949 httpheadersentout = fix_header_names(httpheadersentout);
3950 downloadsize = int(geturls_text.headers.get('Content-Length'));
3951 if(downloadsize is not None):
3952 downloadsize = int(downloadsize);
3953 if downloadsize is None: downloadsize = 0;
3954 fulldatasize = 0;
3955 prevdownsize = 0;
3956 log.info("Downloading URL "+httpurl);
3957 with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
3958 tmpfilename = f.name;
3959 try:
3960 os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple())));
3961 except AttributeError:
3962 try:
3963 os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
3964 except ValueError:
3965 pass;
3966 except ValueError:
3967 pass;
3968 returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
3969 while True:
3970 databytes = geturls_text.read(buffersize);
3971 if not databytes: break;
3972 datasize = len(databytes);
3973 fulldatasize = datasize + fulldatasize;
3974 percentage = "";
3975 if(downloadsize>0):
3976 percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
3977 downloaddiff = fulldatasize - prevdownsize;
3978 log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
3979 prevdownsize = fulldatasize;
3980 f.write(databytes);
3981 f.close();
3982 geturls_text.close();
3983 exec_time_end = time.time();
3984 log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to download file.");
3985 returnval.update({'Filesize': os.path.getsize(tmpfilename), 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)});
3986 return returnval;
if(not haveurllib3):
    def download_from_url_file_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        """Fallback used when urllib3 is unavailable: delegate to the plain urllib implementation."""
        return download_from_url_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep);
if(haveurllib3):
    def download_from_url_to_file_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        """Download httpurl with urllib3 and save it to outpath/outfile.

        When outfile is "-" the data is returned in memory as a 'Content'
        dict instead of being written to disk.  Returns False on failure,
        otherwise a dict describing the downloaded file or content.
        """
        global geturls_download_sleep;
        if(sleep<0):
            sleep = geturls_download_sleep;
        if(not outfile=="-"):
            outpath = outpath.rstrip(os.path.sep);
            filepath = os.path.realpath(outpath+os.path.sep+outfile);
            if(not os.path.exists(outpath)):
                os.makedirs(outpath);
            # Refuse to treat an existing file as a directory or vice versa.
            if(os.path.exists(outpath) and os.path.isfile(outpath)):
                return False;
            if(os.path.exists(filepath) and os.path.isdir(filepath)):
                return False;
            pretmpfilename = download_from_url_file_with_urllib3(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            log.info("Moving file "+tmpfilename+" to "+filepath);
            exec_time_start = time.time();
            shutil.move(tmpfilename, filepath);
            # Preserve the server-side Last-Modified timestamp on the saved file.
            try:
                os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())));
            except AttributeError:
                try:
                    os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
                except ValueError:
                    pass;
            except ValueError:
                pass;
            exec_time_end = time.time();
            log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to move file.");
            if(os.path.exists(tmpfilename)):
                os.remove(tmpfilename);
            # Bugfix: the dict literal previously carried a duplicate 'Method'
            # key; the effective value (httpmethod) is kept, duplicate dropped.
            returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string(exec_time_start - exec_time_end), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        if(outfile=="-" and sys.version[0]=="2"):
            pretmpfilename = download_from_url_file_with_urllib3(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            prevdownsize = 0;
            exec_time_start = time.time();
            with open(tmpfilename, 'rb') as ft:
                f = StringIO();
                while True:
                    databytes = ft.read(buffersize[1]);
                    if not databytes: break;
                    datasize = len(databytes);
                    fulldatasize = datasize + fulldatasize;
                    percentage = "";
                    if(downloadsize>0):
                        percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                    downloaddiff = fulldatasize - prevdownsize;
                    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                    prevdownsize = fulldatasize;
                    f.write(databytes);
                f.seek(0);
                fdata = f.getvalue();
                f.close();
                ft.close();
                os.remove(tmpfilename);
                exec_time_end = time.time();
                log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to copy file.");
            returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string(exec_time_start - exec_time_end), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        if(outfile=="-" and sys.version[0]>="3"):
            pretmpfilename = download_from_url_file_with_urllib3(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            # Bugfix: the Python 3 branch lacked the failure check the
            # Python 2 branch has, crashing on subscript of False.
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            prevdownsize = 0;
            exec_time_start = time.time();
            with open(tmpfilename, 'rb') as ft:
                f = BytesIO();
                while True:
                    databytes = ft.read(buffersize[1]);
                    if not databytes: break;
                    datasize = len(databytes);
                    fulldatasize = datasize + fulldatasize;
                    percentage = "";
                    if(downloadsize>0):
                        percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                    downloaddiff = fulldatasize - prevdownsize;
                    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                    prevdownsize = fulldatasize;
                    f.write(databytes);
                f.seek(0);
                fdata = f.getvalue();
                f.close();
                ft.close();
                os.remove(tmpfilename);
                exec_time_end = time.time();
                log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to copy file.");
            returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string(exec_time_start - exec_time_end), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        return returnval;
if(not haveurllib3):
    def download_from_url_to_file_with_urllib3(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        """Fallback used when urllib3 is unavailable: delegate to the urllib implementation."""
        # Bugfix: the positional call previously passed buffersize into the
        # outfile slot; the *_to_file_* helpers take (..., postdata, outfile,
        # outpath, buffersize, sleep).
        return download_from_url_to_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, outfile, outpath, buffersize, sleep);
if(havemechanize):
    def download_from_url_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        """Fetch httpurl with mechanize and return its body in memory.

        Returns False on connection failure, otherwise a dict with the keys
        'Type', 'Content', 'Headers', 'Version', 'Method', 'HeadersSent',
        'URL' and 'Code'.
        """
        global geturls_download_sleep, havebrotli;
        if(sleep<0):
            sleep = geturls_download_sleep;
        urlparts = urlparse.urlparse(httpurl);
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders);
        httpheaders = fix_header_names(httpheaders);
        # Bugfix: these values used to be written into httpuseragent (a
        # string) instead of the header dict, raising AttributeError.
        if(httpuseragent is not None):
            httpheaders['User-Agent'] = httpuseragent;
        if(httpreferer is not None):
            httpheaders['Referer'] = httpreferer;
        if(urlparts.username is not None or urlparts.password is not None):
            # Credentials embedded in the URL become an HTTP Basic auth header.
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
            httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
        geturls_opener = mechanize.Browser();
        if(isinstance(httpheaders, dict)):
            httpheaders = make_http_headers_from_dict_to_list(httpheaders);
        time.sleep(sleep);
        geturls_opener.addheaders = httpheaders;
        geturls_opener.set_cookiejar(httpcookie);
        geturls_opener.set_handle_robots(False);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            if(httpmethod=="GET"):
                geturls_text = geturls_opener.open(httpurl);
            elif(httpmethod=="POST"):
                geturls_text = geturls_opener.open(httpurl, data=postdata);
            else:
                # Mechanize only supports GET/POST here; anything else is
                # treated as GET.
                geturls_text = geturls_opener.open(httpurl);
        except mechanize.HTTPError as geturls_text_error:
            # HTTP error responses still carry a usable body; keep going.
            geturls_text = geturls_text_error;
            log.info("Error With URL "+httpurl);
        except URLError:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.code;
        httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = geturls_text.geturl();
        httpheaderout = geturls_text.info();
        reqhead = geturls_opener.request;
        httpheadersentout = reqhead.header_items();
        if(isinstance(httpheaderout, list)):
            httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
        if(sys.version[0]=="2"):
            # Python 2: normalize the httplib message object into a plain dict.
            try:
                prehttpheaderout = httpheaderout;
                httpheaderkeys = httpheaderout.keys();
                imax = len(httpheaderkeys);
                ic = 0;
                httpheaderout = {};
                while(ic < imax):
                    httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
                    ic += 1;
            except AttributeError:
                pass;
        httpheaderout = fix_header_names(httpheaderout);
        if(isinstance(httpheadersentout, list)):
            httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
        httpheadersentout = fix_header_names(httpheadersentout);
        log.info("Downloading URL "+httpurl);
        # Transparently decompress gzip/deflate (and brotli when available).
        if(httpheaderout.get("Content-Encoding")=="gzip" or httpheaderout.get("Content-Encoding")=="deflate"):
            if(sys.version[0]=="2"):
                strbuf = StringIO(geturls_text.read());
            if(sys.version[0]>="3"):
                strbuf = BytesIO(geturls_text.read());
            gzstrbuf = gzip.GzipFile(fileobj=strbuf);
            returnval_content = gzstrbuf.read()[:];
        if(httpheaderout.get("Content-Encoding")!="gzip" and httpheaderout.get("Content-Encoding")!="deflate" and httpheaderout.get("Content-Encoding")!="br"):
            returnval_content = geturls_text.read()[:];
        if(httpheaderout.get("Content-Encoding")=="br" and havebrotli):
            returnval_content = geturls_text.read()[:];
            returnval_content = brotli.decompress(returnval_content);
        returnval = {'Type': "Content", 'Content': returnval_content, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
        geturls_text.close();
        return returnval;
if(not havemechanize):
    def download_from_url_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        """Fallback used when mechanize is unavailable: delegate to the urllib implementation."""
        return download_from_url_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, sleep);
if(havemechanize):
    def download_from_url_file_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        """Download httpurl with mechanize into a uniquely-named temporary
        file.

        Returns False on connection failure, otherwise a dict describing the
        temp file ('Filename', 'Filesize', 'Headers', 'DownloadTime', ...).
        """
        global geturls_download_sleep, tmpfileprefix, tmpfilesuffix;
        exec_time_start = time.time();
        # Hash url + buffersize + start time into a unique temp-file suffix.
        myhash = hashlib.new("sha1");
        if(sys.version[0]=="2"):
            myhash.update(httpurl);
            myhash.update(str(buffersize));
            myhash.update(str(exec_time_start));
        if(sys.version[0]>="3"):
            myhash.update(httpurl.encode('utf-8'));
            myhash.update(str(buffersize).encode('utf-8'));
            myhash.update(str(exec_time_start).encode('utf-8'));
        newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest());
        if(sleep<0):
            sleep = geturls_download_sleep;
        urlparts = urlparse.urlparse(httpurl);
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders);
        httpheaders = fix_header_names(httpheaders);
        # Bugfix: these values used to be written into httpuseragent (a
        # string) instead of the header dict, raising AttributeError.
        if(httpuseragent is not None):
            httpheaders['User-Agent'] = httpuseragent;
        if(httpreferer is not None):
            httpheaders['Referer'] = httpreferer;
        if(urlparts.username is not None or urlparts.password is not None):
            # Credentials embedded in the URL become an HTTP Basic auth header.
            inurlencode = b64encode(str(urlparts.username+":"+urlparts.password).encode()).decode("UTF-8");
            httpheaders.update( { 'Authorization': "Basic "+inurlencode } );
        geturls_opener = mechanize.Browser();
        if(isinstance(httpheaders, dict)):
            httpheaders = make_http_headers_from_dict_to_list(httpheaders);
        time.sleep(sleep);
        geturls_opener.addheaders = httpheaders;
        geturls_opener.set_cookiejar(httpcookie);
        geturls_opener.set_handle_robots(False);
        if(postdata is not None and not isinstance(postdata, dict)):
            postdata = urlencode(postdata);
        try:
            if(httpmethod=="GET"):
                geturls_text = geturls_opener.open(httpurl);
            elif(httpmethod=="POST"):
                geturls_text = geturls_opener.open(httpurl, data=postdata);
            else:
                geturls_text = geturls_opener.open(httpurl);
        except mechanize.HTTPError as geturls_text_error:
            # HTTP error responses still carry a usable body; keep going.
            geturls_text = geturls_text_error;
            log.info("Error With URL "+httpurl);
        except URLError:
            log.info("Error With URL "+httpurl);
            return False;
        except socket.timeout:
            log.info("Error With URL "+httpurl);
            return False;
        httpcodeout = geturls_text.code;
        httpversionout = "1.1";
        httpmethodout = httpmethod;
        httpurlout = geturls_text.geturl();
        httpheaderout = geturls_text.info();
        reqhead = geturls_opener.request;
        httpheadersentout = reqhead.header_items();
        if(isinstance(httpheaderout, list)):
            httpheaderout = dict(make_http_headers_from_list_to_dict(httpheaderout));
        if(sys.version[0]=="2"):
            # Python 2: normalize the httplib message object into a plain dict.
            try:
                prehttpheaderout = httpheaderout;
                httpheaderkeys = httpheaderout.keys();
                imax = len(httpheaderkeys);
                ic = 0;
                httpheaderout = {};
                while(ic < imax):
                    httpheaderout.update({httpheaderkeys[ic]: prehttpheaderout[httpheaderkeys[ic]]});
                    ic += 1;
            except AttributeError:
                pass;
        httpheaderout = fix_header_names(httpheaderout);
        if(isinstance(httpheadersentout, list)):
            httpheadersentout = dict(make_http_headers_from_list_to_dict(httpheadersentout));
        httpheadersentout = fix_header_names(httpheadersentout);
        # Bugfix: int(None) raised TypeError when Content-Length was absent;
        # fetch first and convert only when the header is present.
        downloadsize = httpheaderout.get('Content-Length');
        if(downloadsize is not None):
            downloadsize = int(downloadsize);
        if downloadsize is None: downloadsize = 0;
        fulldatasize = 0;
        prevdownsize = 0;
        log.info("Downloading URL "+httpurl);
        with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
            tmpfilename = f.name;
            # Propagate the server's Last-Modified time onto the temp file.
            try:
                os.utime(tmpfilename, (time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(httpheaderout.get('Last-Modified')).timetuple())));
            except AttributeError:
                try:
                    os.utime(tmpfilename, (time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(httpheaderout.get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
                except ValueError:
                    pass;
            except ValueError:
                pass;
            returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'Headers': httpheaderout, 'Version': httpversionout, 'Method': httpmethodout, 'HeadersSent': httpheadersentout, 'URL': httpurlout, 'Code': httpcodeout};
            while True:
                databytes = geturls_text.read(buffersize);
                if not databytes: break;
                datasize = len(databytes);
                fulldatasize = datasize + fulldatasize;
                percentage = "";
                if(downloadsize>0):
                    percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                downloaddiff = fulldatasize - prevdownsize;
                log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                prevdownsize = fulldatasize;
                f.write(databytes);
            f.close();
        geturls_text.close();
        exec_time_end = time.time();
        log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to download file.");
        returnval.update({'Filesize': os.path.getsize(tmpfilename), 'DownloadTime': float(exec_time_start - exec_time_end), 'DownloadTimeReadable': hms_string(exec_time_start - exec_time_end)});
        return returnval;
if(not havemechanize):
    def download_from_url_file_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        """Fallback used when mechanize is unavailable: delegate to the urllib implementation."""
        return download_from_url_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize, sleep);
if(havemechanize):
    def download_from_url_to_file_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        """Download httpurl with mechanize and save it to outpath/outfile.

        When outfile is "-" the data is returned in memory as a 'Content'
        dict instead of being written to disk.  Returns False on failure,
        otherwise a dict describing the downloaded file or content.
        """
        global geturls_download_sleep;
        if(sleep<0):
            sleep = geturls_download_sleep;
        if(not outfile=="-"):
            outpath = outpath.rstrip(os.path.sep);
            filepath = os.path.realpath(outpath+os.path.sep+outfile);
            if(not os.path.exists(outpath)):
                os.makedirs(outpath);
            # Refuse to treat an existing file as a directory or vice versa.
            if(os.path.exists(outpath) and os.path.isfile(outpath)):
                return False;
            if(os.path.exists(filepath) and os.path.isdir(filepath)):
                return False;
            pretmpfilename = download_from_url_file_with_mechanize(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            log.info("Moving file "+tmpfilename+" to "+filepath);
            exec_time_start = time.time();
            shutil.move(tmpfilename, filepath);
            # Preserve the server-side Last-Modified timestamp on the saved file.
            try:
                os.utime(filepath, (time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple()), time.mktime(email.utils.parsedate_to_datetime(pretmpfilename.get('Headers').get('Last-Modified')).timetuple())));
            except AttributeError:
                try:
                    os.utime(filepath, (time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple()), time.mktime(datetime.datetime.strptime(pretmpfilename.get('Headers').get('Last-Modified'), "%a, %d %b %Y %H:%M:%S %Z").timetuple())));
                except ValueError:
                    pass;
            except ValueError:
                pass;
            exec_time_end = time.time();
            log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to move file.");
            if(os.path.exists(tmpfilename)):
                os.remove(tmpfilename);
            # Bugfix: the dict literal previously carried a duplicate 'Method'
            # key; the effective value (httpmethod) is kept, duplicate dropped.
            returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string(exec_time_start - exec_time_end), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        if(outfile=="-" and sys.version[0]=="2"):
            pretmpfilename = download_from_url_file_with_mechanize(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            prevdownsize = 0;
            exec_time_start = time.time();
            with open(tmpfilename, 'rb') as ft:
                f = StringIO();
                while True:
                    databytes = ft.read(buffersize[1]);
                    if not databytes: break;
                    datasize = len(databytes);
                    fulldatasize = datasize + fulldatasize;
                    percentage = "";
                    if(downloadsize>0):
                        percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                    downloaddiff = fulldatasize - prevdownsize;
                    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                    prevdownsize = fulldatasize;
                    f.write(databytes);
                f.seek(0);
                fdata = f.getvalue();
                f.close();
                ft.close();
                os.remove(tmpfilename);
                exec_time_end = time.time();
                log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to copy file.");
            returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string(exec_time_start - exec_time_end), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        if(outfile=="-" and sys.version[0]>="3"):
            pretmpfilename = download_from_url_file_with_mechanize(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            # Bugfix: the Python 3 branch lacked the failure check the
            # Python 2 branch has, crashing on subscript of False.
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            prevdownsize = 0;
            exec_time_start = time.time();
            with open(tmpfilename, 'rb') as ft:
                f = BytesIO();
                while True:
                    databytes = ft.read(buffersize[1]);
                    if not databytes: break;
                    datasize = len(databytes);
                    fulldatasize = datasize + fulldatasize;
                    percentage = "";
                    if(downloadsize>0):
                        percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                    downloaddiff = fulldatasize - prevdownsize;
                    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                    prevdownsize = fulldatasize;
                    f.write(databytes);
                f.seek(0);
                fdata = f.getvalue();
                f.close();
                ft.close();
                os.remove(tmpfilename);
                exec_time_end = time.time();
                log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to copy file.");
            # Bugfix: 'HeadersSent' was the literal list ['HeadersSent']
            # instead of the value carried in pretmpfilename.
            returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string(exec_time_start - exec_time_end), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': httpmethod, 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        return returnval;
if(not havemechanize):
    def download_from_url_to_file_with_mechanize(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        """Fallback used when mechanize is unavailable: delegate to the urllib implementation."""
        # Bugfix: the positional call previously passed buffersize into the
        # outfile slot; the *_to_file_* helpers take (..., postdata, outfile,
        # outpath, buffersize, sleep).
        return download_from_url_to_file_with_urllib(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, outfile, outpath, buffersize, sleep);
def download_file_from_ftp_file(url):
    """Retrieve an ftp:// or ftps:// URL and return its content as a
    seekable BytesIO object, or False on failure.

    Anonymous access is used when the URL carries no credentials.
    """
    urlparts = urlparse.urlparse(url);
    file_name = os.path.basename(urlparts.path);
    file_dir = os.path.dirname(urlparts.path);
    # Fall back to anonymous credentials when none are embedded in the URL.
    if(urlparts.username is not None):
        ftp_username = urlparts.username;
    else:
        ftp_username = "anonymous";
    if(urlparts.password is not None):
        ftp_password = urlparts.password;
    elif(urlparts.password is None and urlparts.username=="anonymous"):
        ftp_password = "anonymous";
    else:
        ftp_password = "";
    if(urlparts.scheme=="ftp"):
        ftp = FTP();
    elif(urlparts.scheme=="ftps"):
        ftp = FTP_TLS();
    else:
        # Any non-FTP scheme (including http/https) is rejected here.
        return False;
    ftp_port = urlparts.port;
    if(urlparts.port is None):
        ftp_port = 21;
    try:
        ftp.connect(urlparts.hostname, ftp_port);
    except socket.gaierror:
        # Bugfix: these log lines referenced the undefined name httpurl.
        log.info("Error With URL "+url);
        return False;
    except socket.timeout:
        log.info("Error With URL "+url);
        return False;
    # Bugfix: login previously used the raw urlparts values, which are None
    # for credential-less URLs; use the anonymous fallbacks computed above.
    ftp.login(ftp_username, ftp_password);
    if(urlparts.scheme=="ftps"):
        ftp.prot_p();
    ftpfile = BytesIO();
    ftp.retrbinary("RETR "+urlparts.path, ftpfile.write);
    #ftp.storbinary("STOR "+urlparts.path, ftpfile.write);
    ftp.close();
    ftpfile.seek(0, 0);
    return ftpfile;
def download_file_from_ftp_string(url):
    """Return the remote FTP file's bytes, or False if the download failed."""
    ftpfile = download_file_from_ftp_file(url);
    # Bugfix: previously False.read() raised AttributeError on failure.
    if(not ftpfile):
        return False;
    return ftpfile.read();
def download_from_url_with_ftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
    """Fetch an ftp:// or ftps:// URL and return its content in a dict
    shaped like the HTTP download helpers (False on failure).

    The HTTP-style header/cookie/method parameters are accepted for API
    symmetry but are not transmitted over FTP.
    """
    global geturls_download_sleep, havebrotli;
    if(sleep<0):
        sleep = geturls_download_sleep;
    urlparts = urlparse.urlparse(httpurl);
    if(isinstance(httpheaders, list)):
        httpheaders = make_http_headers_from_list_to_dict(httpheaders);
    httpheaders = fix_header_names(httpheaders);
    # Bugfix: these values used to be written into httpuseragent (a string)
    # instead of the header dict, raising AttributeError.
    if(httpuseragent is not None):
        httpheaders['User-Agent'] = httpuseragent;
    if(httpreferer is not None):
        httpheaders['Referer'] = httpreferer;
    if(isinstance(httpheaders, dict)):
        httpheaders = make_http_headers_from_dict_to_list(httpheaders);
    time.sleep(sleep);
    geturls_text = download_file_from_ftp_file(httpurl);
    if(not geturls_text):
        return False;
    log.info("Downloading URL "+httpurl);
    returnval_content = geturls_text.read()[:];
    # FTP has no HTTP metadata, so those fields are None.
    returnval = {'Type': "Content", 'Content': returnval_content, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None};
    geturls_text.close();
    return returnval;
def download_from_url_file_with_ftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
    """Download an FTP URL into a uniquely-named temporary file.

    Returns a dict describing the temp file ('Type' "File", 'Filename',
    sizes, timings), or False when the transfer failed.
    """
    global geturls_download_sleep, tmpfileprefix, tmpfilesuffix
    exec_time_start = time.time()
    # Derive a unique temp-file suffix from the URL, buffer size and start time.
    myhash = hashlib.new("sha1")
    if(sys.version[0]=="2"):
        myhash.update(httpurl)
        myhash.update(str(buffersize))
        myhash.update(str(exec_time_start))
    if(sys.version[0]>="3"):
        myhash.update(httpurl.encode('utf-8'))
        myhash.update(str(buffersize).encode('utf-8'))
        myhash.update(str(exec_time_start).encode('utf-8'))
    newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest())
    if(sleep<0):
        sleep = geturls_download_sleep
    urlparts = urlparse.urlparse(httpurl)
    if(isinstance(httpheaders, list)):
        httpheaders = make_http_headers_from_list_to_dict(httpheaders)
    httpheaders = fix_header_names(httpheaders)
    if(httpuseragent is not None):
        # BUGFIX: previously tried to .update() the user-agent string
        # itself when the header key was absent.
        httpheaders['User-Agent'] = httpuseragent
    if(httpreferer is not None):
        # BUGFIX: same wrong-object update for the Referer header.
        httpheaders['Referer'] = httpreferer
    if(isinstance(httpheaders, dict)):
        httpheaders = make_http_headers_from_dict_to_list(httpheaders)
    time.sleep(sleep)
    geturls_text = download_file_from_ftp_file(httpurl)
    if(not geturls_text):
        return False
    # The FTP helper returns an in-memory file; size it by seeking to the end.
    geturls_text.seek(0, 2)
    downloadsize = geturls_text.tell()
    geturls_text.seek(0, 0)
    if(downloadsize is not None):
        downloadsize = int(downloadsize)
    if downloadsize is None: downloadsize = 0
    fulldatasize = 0
    prevdownsize = 0
    log.info("Downloading URL "+httpurl)
    with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
        tmpfilename = f.name
        returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None}
        while True:
            databytes = geturls_text.read(buffersize)
            if not databytes: break
            datasize = len(databytes)
            fulldatasize = datasize + fulldatasize
            percentage = ""
            if(downloadsize>0):
                percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%"
            downloaddiff = fulldatasize - prevdownsize
            log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix'])
            prevdownsize = fulldatasize
            f.write(databytes)
        f.close()
    geturls_text.close()
    exec_time_end = time.time()
    # BUGFIX: elapsed time is end - start; the old order produced a
    # negative duration in the log and in 'DownloadTime'.
    log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to download file.")
    returnval.update({'Filesize': os.path.getsize(tmpfilename), 'DownloadTime': float(exec_time_end - exec_time_start), 'DownloadTimeReadable': hms_string(exec_time_end - exec_time_start)})
    return returnval
def download_from_url_to_file_with_ftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
    """Download an FTP URL to a named file under ``outpath``, or in memory
    when ``outfile`` is "-".

    Returns a result dict ('Type' is "File" or "Content"), or False on error.
    NOTE(review): ``buffersize`` keeps its original mutable-list default (it
    is only read, never mutated) to preserve the public interface.
    """
    global geturls_download_sleep
    if(sleep<0):
        sleep = geturls_download_sleep
    if(not outfile=="-"):
        outpath = outpath.rstrip(os.path.sep)
        filepath = os.path.realpath(outpath+os.path.sep+outfile)
        if(not os.path.exists(outpath)):
            os.makedirs(outpath)
        if(os.path.exists(outpath) and os.path.isfile(outpath)):
            return False
        if(os.path.exists(filepath) and os.path.isdir(filepath)):
            return False
        pretmpfilename = download_from_url_file_with_ftp(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep)
        if(not pretmpfilename):
            return False
        tmpfilename = pretmpfilename['Filename']
        downloadsize = os.path.getsize(tmpfilename)
        fulldatasize = 0
        log.info("Moving file "+tmpfilename+" to "+filepath)
        exec_time_start = time.time()
        shutil.move(tmpfilename, filepath)
        exec_time_end = time.time()
        # BUGFIX: elapsed time is end - start (was logged negative).
        log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to move file.")
        if(os.path.exists(tmpfilename)):
            os.remove(tmpfilename)
        # BUGFIX: the result dict listed 'Method' twice, so the real method
        # value was clobbered by None; the duplicate key was removed.
        returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': pretmpfilename['Method'], 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']}
    if(outfile=="-" and sys.version[0]=="2"):
        pretmpfilename = download_from_url_file_with_ftp(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep)
        if(not pretmpfilename):
            return False
        tmpfilename = pretmpfilename['Filename']
        downloadsize = os.path.getsize(tmpfilename)
        fulldatasize = 0
        prevdownsize = 0
        exec_time_start = time.time()
        with open(tmpfilename, 'rb') as ft:
            f = StringIO()
            while True:
                databytes = ft.read(buffersize[1])
                if not databytes: break
                datasize = len(databytes)
                fulldatasize = datasize + fulldatasize
                percentage = ""
                if(downloadsize>0):
                    percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%"
                downloaddiff = fulldatasize - prevdownsize
                log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix'])
                prevdownsize = fulldatasize
                f.write(databytes)
            f.seek(0)
            fdata = f.getvalue()
            f.close()
            ft.close()
        os.remove(tmpfilename)
        exec_time_end = time.time()
        log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to copy file.")
        returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': pretmpfilename['Method'], 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']}
    if(outfile=="-" and sys.version[0]>="3"):
        pretmpfilename = download_from_url_file_with_ftp(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep)
        # BUGFIX: this branch lacked the failure check the Python-2 branch had.
        if(not pretmpfilename):
            return False
        tmpfilename = pretmpfilename['Filename']
        downloadsize = os.path.getsize(tmpfilename)
        fulldatasize = 0
        prevdownsize = 0
        exec_time_start = time.time()
        with open(tmpfilename, 'rb') as ft:
            f = BytesIO()
            while True:
                databytes = ft.read(buffersize[1])
                if not databytes: break
                datasize = len(databytes)
                fulldatasize = datasize + fulldatasize
                percentage = ""
                if(downloadsize>0):
                    percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%"
                downloaddiff = fulldatasize - prevdownsize
                log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix'])
                prevdownsize = fulldatasize
                f.write(databytes)
            f.seek(0)
            fdata = f.getvalue()
            f.close()
            ft.close()
        os.remove(tmpfilename)
        exec_time_end = time.time()
        log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to copy file.")
        returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': pretmpfilename['Method'], 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']}
    return returnval
def upload_file_to_ftp_file(ftpfile, url):
    """Upload the file-like object ``ftpfile`` to an ftp:// / ftps:// URL.

    Returns the (rewound) file object on success, False on failure.
    """
    urlparts = urlparse.urlparse(url)
    file_name = os.path.basename(urlparts.path)
    file_dir = os.path.dirname(urlparts.path)
    # NOTE(review): ftp_username/ftp_password are computed but the login
    # below uses urlparts.username/password directly — kept as-is since the
    # download counterpart behaves the same way.
    if(urlparts.username is not None):
        ftp_username = urlparts.username
    else:
        ftp_username = "anonymous"
    if(urlparts.password is not None):
        ftp_password = urlparts.password
    elif(urlparts.password is None and urlparts.username=="anonymous"):
        ftp_password = "anonymous"
    else:
        ftp_password = ""
    if(urlparts.scheme=="ftp"):
        ftp = FTP()
    elif(urlparts.scheme=="ftps"):
        ftp = FTP_TLS()
    else:
        return False
    if(urlparts.scheme=="http" or urlparts.scheme=="https"):
        return False
    ftp_port = urlparts.port
    if(urlparts.port is None):
        ftp_port = 21
    try:
        ftp.connect(urlparts.hostname, ftp_port)
    except socket.gaierror:
        # BUGFIX: the log lines referenced the undefined name ``httpurl``;
        # this function's parameter is ``url``.
        log.info("Error With URL "+url)
        return False
    except socket.timeout:
        log.info("Error With URL "+url)
        return False
    ftp.login(urlparts.username, urlparts.password)
    if(urlparts.scheme=="ftps"):
        ftp.prot_p()
    ftp.storbinary("STOR "+urlparts.path, ftpfile)
    ftp.close()
    ftpfile.seek(0, 0)
    return ftpfile
def upload_file_to_ftp_string(ftpstring, url):
    """Upload the bytes ``ftpstring`` to ``url`` over FTP/FTPS.

    NOTE(review): the buffer handed to upload_file_to_ftp_file() is closed
    before being returned, so on success the caller receives a closed
    BytesIO (or False on failure) — preserved as-is for compatibility.
    """
    buffer_obj = BytesIO(ftpstring)
    result = upload_file_to_ftp_file(buffer_obj, url)
    buffer_obj.close()
    return result
if(haveparamiko):
    def download_file_from_sftp_file(url):
        """Fetch a sftp:// URL via paramiko and return a rewound BytesIO of
        the remote file, or False on any connection/authentication failure."""
        urlparts = urlparse.urlparse(url)
        file_name = os.path.basename(urlparts.path)
        file_dir = os.path.dirname(urlparts.path)
        if(urlparts.scheme=="http" or urlparts.scheme=="https"):
            return False
        sftp_port = urlparts.port
        if(urlparts.port is None):
            sftp_port = 22
        else:
            sftp_port = urlparts.port
        if(urlparts.username is not None):
            sftp_username = urlparts.username
        else:
            sftp_username = "anonymous"
        if(urlparts.password is not None):
            sftp_password = urlparts.password
        elif(urlparts.password is None and urlparts.username=="anonymous"):
            sftp_password = "anonymous"
        else:
            sftp_password = ""
        if(urlparts.scheme!="sftp"):
            return False
        ssh = paramiko.SSHClient()
        ssh.load_system_host_keys()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        try:
            ssh.connect(urlparts.hostname, port=sftp_port, username=urlparts.username, password=urlparts.password)
        except paramiko.ssh_exception.SSHException:
            return False
        except socket.gaierror:
            # BUGFIX: the log lines referenced the undefined name
            # ``httpurl``; this function's parameter is ``url``.
            log.info("Error With URL "+url)
            return False
        except socket.timeout:
            log.info("Error With URL "+url)
            return False
        sftp = ssh.open_sftp()
        sftpfile = BytesIO()
        sftp.getfo(urlparts.path, sftpfile)
        sftp.close()
        ssh.close()
        sftpfile.seek(0, 0)
        return sftpfile
else:
    def download_file_from_sftp_file(url):
        # Stub when paramiko is unavailable.
        return False
if(haveparamiko):
    def download_file_from_sftp_string(url):
        """Return the raw bytes of a sftp:// URL, or False on failure."""
        sftpfile = download_file_from_sftp_file(url)
        if(not sftpfile):
            # Propagate failure instead of crashing on False.read().
            return False
        return sftpfile.read()
else:
    # BUGFIX: the fallback previously (re)defined
    # download_file_from_ftp_string, clobbering the working FTP helper
    # whenever paramiko was missing; it must define the SFTP name.
    def download_file_from_sftp_string(url):
        return False
if(haveparamiko):
    def download_from_url_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        """Download a sftp:// URL and return its contents in memory.

        Returns a dict with 'Type' "Content" and the raw bytes under
        'Content', or False on failure.  HTTP-style arguments exist for API
        symmetry; SFTP does not transmit them.
        """
        global geturls_download_sleep, havebrotli
        if(sleep<0):
            sleep = geturls_download_sleep
        urlparts = urlparse.urlparse(httpurl)
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders)
        httpheaders = fix_header_names(httpheaders)
        if(httpuseragent is not None):
            # BUGFIX: the old code called .update() on the user-agent string
            # itself when the key was absent, instead of updating the dict.
            httpheaders['User-Agent'] = httpuseragent
        if(httpreferer is not None):
            # BUGFIX: same wrong-object update for the Referer header.
            httpheaders['Referer'] = httpreferer
        if(isinstance(httpheaders, dict)):
            httpheaders = make_http_headers_from_dict_to_list(httpheaders)
        time.sleep(sleep)
        geturls_text = download_file_from_sftp_file(httpurl)
        if(not geturls_text):
            return False
        log.info("Downloading URL "+httpurl)
        returnval_content = geturls_text.read()[:]
        returnval = {'Type': "Content", 'Content': returnval_content, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None}
        geturls_text.close()
        return returnval
if(not haveparamiko):
    # Fallback stub when paramiko is not installed: the SFTP download API
    # still exists but always signals failure by returning False.
    def download_from_url_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        return False
if(haveparamiko):
    def download_from_url_file_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        """Download a sftp:// URL into a uniquely-named temporary file.

        Returns a dict describing the temp file ('Type' "File", 'Filename',
        sizes, timings), or False when the transfer failed.
        """
        global geturls_download_sleep, tmpfileprefix, tmpfilesuffix
        exec_time_start = time.time()
        # Derive a unique temp-file suffix from URL, buffer size and start time.
        myhash = hashlib.new("sha1")
        if(sys.version[0]=="2"):
            myhash.update(httpurl)
            myhash.update(str(buffersize))
            myhash.update(str(exec_time_start))
        if(sys.version[0]>="3"):
            myhash.update(httpurl.encode('utf-8'))
            myhash.update(str(buffersize).encode('utf-8'))
            myhash.update(str(exec_time_start).encode('utf-8'))
        newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest())
        if(sleep<0):
            sleep = geturls_download_sleep
        urlparts = urlparse.urlparse(httpurl)
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders)
        httpheaders = fix_header_names(httpheaders)
        if(httpuseragent is not None):
            # BUGFIX: previously tried to .update() the user-agent string
            # itself when the header key was absent.
            httpheaders['User-Agent'] = httpuseragent
        if(httpreferer is not None):
            # BUGFIX: same wrong-object update for the Referer header.
            httpheaders['Referer'] = httpreferer
        if(isinstance(httpheaders, dict)):
            httpheaders = make_http_headers_from_dict_to_list(httpheaders)
        time.sleep(sleep)
        geturls_text = download_file_from_sftp_file(httpurl)
        if(not geturls_text):
            return False
        # Size the in-memory SFTP download by seeking to its end.
        geturls_text.seek(0, 2)
        downloadsize = geturls_text.tell()
        geturls_text.seek(0, 0)
        if(downloadsize is not None):
            downloadsize = int(downloadsize)
        if downloadsize is None: downloadsize = 0
        fulldatasize = 0
        prevdownsize = 0
        log.info("Downloading URL "+httpurl)
        with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
            tmpfilename = f.name
            returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None}
            while True:
                databytes = geturls_text.read(buffersize)
                if not databytes: break
                datasize = len(databytes)
                fulldatasize = datasize + fulldatasize
                percentage = ""
                if(downloadsize>0):
                    percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%"
                downloaddiff = fulldatasize - prevdownsize
                log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix'])
                prevdownsize = fulldatasize
                f.write(databytes)
            f.close()
        geturls_text.close()
        exec_time_end = time.time()
        # BUGFIX: elapsed time is end - start (was logged negative).
        log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to download file.")
        returnval.update({'Filesize': os.path.getsize(tmpfilename), 'DownloadTime': float(exec_time_end - exec_time_start), 'DownloadTimeReadable': hms_string(exec_time_end - exec_time_start)})
        return returnval
if(not haveparamiko):
    # Fallback stub when paramiko is not installed: the SFTP file-download
    # API still exists but always signals failure by returning False.
    def download_from_url_file_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        return False
if(haveparamiko):
    def download_from_url_to_file_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        """Download a sftp:// URL to a named file under ``outpath``, or in
        memory when ``outfile`` is "-".

        Returns a result dict ('Type' is "File" or "Content"), or False on
        error.  NOTE(review): ``buffersize`` keeps its original mutable-list
        default (read-only) to preserve the public interface.
        """
        global geturls_download_sleep
        if(sleep<0):
            sleep = geturls_download_sleep
        if(not outfile=="-"):
            outpath = outpath.rstrip(os.path.sep)
            filepath = os.path.realpath(outpath+os.path.sep+outfile)
            if(not os.path.exists(outpath)):
                os.makedirs(outpath)
            if(os.path.exists(outpath) and os.path.isfile(outpath)):
                return False
            if(os.path.exists(filepath) and os.path.isdir(filepath)):
                return False
            pretmpfilename = download_from_url_file_with_sftp(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep)
            if(not pretmpfilename):
                return False
            tmpfilename = pretmpfilename['Filename']
            downloadsize = os.path.getsize(tmpfilename)
            fulldatasize = 0
            log.info("Moving file "+tmpfilename+" to "+filepath)
            exec_time_start = time.time()
            shutil.move(tmpfilename, filepath)
            exec_time_end = time.time()
            # BUGFIX: elapsed time is end - start (was logged negative).
            log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to move file.")
            if(os.path.exists(tmpfilename)):
                os.remove(tmpfilename)
            # BUGFIX: the result dict listed 'Method' twice, clobbering the
            # real value with None; the duplicate key was removed.
            returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': pretmpfilename['Method'], 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']}
        if(outfile=="-" and sys.version[0]=="2"):
            pretmpfilename = download_from_url_file_with_sftp(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep)
            if(not pretmpfilename):
                return False
            tmpfilename = pretmpfilename['Filename']
            downloadsize = os.path.getsize(tmpfilename)
            fulldatasize = 0
            prevdownsize = 0
            exec_time_start = time.time()
            with open(tmpfilename, 'rb') as ft:
                f = StringIO()
                while True:
                    databytes = ft.read(buffersize[1])
                    if not databytes: break
                    datasize = len(databytes)
                    fulldatasize = datasize + fulldatasize
                    percentage = ""
                    if(downloadsize>0):
                        percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%"
                    downloaddiff = fulldatasize - prevdownsize
                    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix'])
                    prevdownsize = fulldatasize
                    f.write(databytes)
                f.seek(0)
                fdata = f.getvalue()
                f.close()
                ft.close()
            os.remove(tmpfilename)
            exec_time_end = time.time()
            log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to copy file.")
            returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': pretmpfilename['Method'], 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']}
        if(outfile=="-" and sys.version[0]>="3"):
            pretmpfilename = download_from_url_file_with_sftp(httpurl, httpheaders, httpuseragent, httpreferer, httpcookie, httpmethod, postdata, buffersize[0], sleep)
            # BUGFIX: this branch lacked the failure check the Py2 branch had.
            if(not pretmpfilename):
                return False
            tmpfilename = pretmpfilename['Filename']
            downloadsize = os.path.getsize(tmpfilename)
            fulldatasize = 0
            prevdownsize = 0
            exec_time_start = time.time()
            with open(tmpfilename, 'rb') as ft:
                f = BytesIO()
                while True:
                    databytes = ft.read(buffersize[1])
                    if not databytes: break
                    datasize = len(databytes)
                    fulldatasize = datasize + fulldatasize
                    percentage = ""
                    if(downloadsize>0):
                        percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%"
                    downloaddiff = fulldatasize - prevdownsize
                    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix'])
                    prevdownsize = fulldatasize
                    f.write(databytes)
                f.seek(0)
                fdata = f.getvalue()
                f.close()
                ft.close()
            os.remove(tmpfilename)
            exec_time_end = time.time()
            log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to copy file.")
            returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_end - exec_time_start), 'MoveFileTimeReadable': hms_string(exec_time_end - exec_time_start), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': pretmpfilename['Method'], 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']}
        return returnval
if(not haveparamiko):
    # Fallback stub when paramiko is not installed: the SFTP to-file API
    # still exists but always signals failure by returning False.
    def download_from_url_to_file_with_sftp(httpurl, httpheaders=geturls_headers, httpuseragent=None, httpreferer=None, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        return False
if(haveparamiko):
    def upload_file_to_sftp_file(sftpfile, url):
        """Upload the file-like object ``sftpfile`` to a sftp:// URL via
        paramiko.  Returns the (rewound) file object, or False on failure."""
        urlparts = urlparse.urlparse(url)
        file_name = os.path.basename(urlparts.path)
        file_dir = os.path.dirname(urlparts.path)
        sftp_port = urlparts.port
        if(urlparts.scheme=="http" or urlparts.scheme=="https"):
            return False
        if(urlparts.port is None):
            sftp_port = 22
        else:
            sftp_port = urlparts.port
        if(urlparts.username is not None):
            sftp_username = urlparts.username
        else:
            sftp_username = "anonymous"
        if(urlparts.password is not None):
            sftp_password = urlparts.password
        elif(urlparts.password is None and urlparts.username=="anonymous"):
            sftp_password = "anonymous"
        else:
            sftp_password = ""
        if(urlparts.scheme!="sftp"):
            return False
        ssh = paramiko.SSHClient()
        ssh.load_system_host_keys()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        try:
            ssh.connect(urlparts.hostname, port=sftp_port, username=urlparts.username, password=urlparts.password)
        except paramiko.ssh_exception.SSHException:
            return False
        except socket.gaierror:
            # BUGFIX: the log lines referenced the undefined name
            # ``httpurl``; this function's parameter is ``url``.
            log.info("Error With URL "+url)
            return False
        except socket.timeout:
            log.info("Error With URL "+url)
            return False
        sftp = ssh.open_sftp()
        sftp.putfo(sftpfile, urlparts.path)
        sftp.close()
        ssh.close()
        sftpfile.seek(0, 0)
        return sftpfile
else:
    def upload_file_to_sftp_file(sftpfile, url):
        # Stub when paramiko is unavailable.
        return False
if(haveparamiko):
    def upload_file_to_sftp_string(sftpstring, url):
        """Upload the bytes ``sftpstring`` to a sftp:// URL.

        Returns whatever upload_file_to_sftp_file() returns (False on
        failure); the temporary buffer is closed before returning.
        """
        sftpfileo = BytesIO(sftpstring)
        # BUGFIX: previously called a nonexistent upload_file_to_sftp_files()
        # and passed the undefined name ``ftpfileo``.
        sftpfile = upload_file_to_sftp_file(sftpfileo, url)
        sftpfileo.close()
        return sftpfile
else:
    # BUGFIX: the fallback signature was missing the data parameter, so any
    # caller using the normal two-argument form would raise TypeError.
    def upload_file_to_sftp_string(sftpstring, url):
        return False
if(havepysftp):
    def download_file_from_pysftp_file(url):
        """Fetch a sftp:// URL via pysftp and return a rewound BytesIO of
        the remote file, or False on failure."""
        urlparts = urlparse.urlparse(url)
        file_name = os.path.basename(urlparts.path)
        file_dir = os.path.dirname(urlparts.path)
        if(urlparts.scheme=="http" or urlparts.scheme=="https"):
            return False
        sftp_port = urlparts.port
        if(urlparts.port is None):
            sftp_port = 22
        else:
            sftp_port = urlparts.port
        if(urlparts.username is not None):
            sftp_username = urlparts.username
        else:
            sftp_username = "anonymous"
        if(urlparts.password is not None):
            sftp_password = urlparts.password
        elif(urlparts.password is None and urlparts.username=="anonymous"):
            sftp_password = "anonymous"
        else:
            sftp_password = ""
        if(urlparts.scheme!="sftp"):
            return False
        try:
            # BUGFIX: the Connection object was previously discarded and the
            # code then called methods on an undefined ``ssh`` name; keep the
            # pysftp connection and use it directly.
            sftp = pysftp.Connection(urlparts.hostname, port=sftp_port, username=urlparts.username, password=urlparts.password)
        except paramiko.ssh_exception.SSHException:
            return False
        except socket.gaierror:
            # BUGFIX: log lines referenced undefined ``httpurl``; the
            # parameter is ``url``.
            log.info("Error With URL "+url)
            return False
        except socket.timeout:
            log.info("Error With URL "+url)
            return False
        sftpfile = BytesIO()
        # pysftp.Connection exposes paramiko's getfo() for file-object reads.
        sftp.getfo(urlparts.path, sftpfile)
        sftp.close()
        sftpfile.seek(0, 0)
        return sftpfile
else:
    def download_file_from_pysftp_file(url):
        # Stub when pysftp is unavailable.
        return False
if(havepysftp):
    def download_file_from_pysftp_string(url):
        """Return the raw bytes of a sftp:// URL via pysftp, or False."""
        sftpfile = download_file_from_pysftp_file(url)
        if(not sftpfile):
            # Propagate failure instead of crashing on False.read().
            return False
        return sftpfile.read()
else:
    # BUGFIX: the fallback previously (re)defined
    # download_file_from_ftp_string, clobbering the working FTP helper
    # whenever pysftp was missing; it must define the pysftp name.
    def download_file_from_pysftp_string(url):
        return False
if(havepysftp):
    def download_from_url_with_pysftp(httpurl, httpheaders=geturls_headers, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        """Fetch a sftp:// URL via pysftp and return a content dict.

        The header/cookie/method arguments are accepted for API symmetry
        with the HTTP downloaders and are only normalized, never sent.
        Returns False when the transfer failed.
        """
        global geturls_download_sleep, havebrotli
        if(sleep<0):
            sleep = geturls_download_sleep
        urlparts = urlparse.urlparse(httpurl)
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders)
        httpheaders = fix_header_names(httpheaders)
        if(isinstance(httpheaders, dict)):
            httpheaders = make_http_headers_from_dict_to_list(httpheaders)
        time.sleep(sleep)
        remote_stream = download_file_from_pysftp_file(httpurl)
        if(not remote_stream):
            return False
        log.info("Downloading URL "+httpurl)
        payload = remote_stream.read()[:]
        remote_stream.close()
        return {'Type': "Content", 'Content': payload, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None}
if(not havepysftp):
    # Fallback stub when pysftp is not installed: the pysftp download API
    # still exists but always signals failure by returning False.
    def download_from_url_with_pysftp(httpurl, httpheaders=geturls_headers, httpcookie=geturls_cj, httpmethod="GET", postdata=None, sleep=-1):
        return False
if(havepysftp):
    def download_from_url_file_with_pysftp(httpurl, httpheaders=geturls_headers, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        """Download a sftp:// URL via pysftp into a uniquely-named temporary
        file.  Returns a dict describing the temp file, or False on failure."""
        global geturls_download_sleep, tmpfileprefix, tmpfilesuffix
        exec_time_start = time.time()
        # Derive a unique temp-file suffix from URL, buffer size and start time.
        myhash = hashlib.new("sha1")
        if(sys.version[0]=="2"):
            myhash.update(httpurl)
            myhash.update(str(buffersize))
            myhash.update(str(exec_time_start))
        if(sys.version[0]>="3"):
            myhash.update(httpurl.encode('utf-8'))
            myhash.update(str(buffersize).encode('utf-8'))
            myhash.update(str(exec_time_start).encode('utf-8'))
        newtmpfilesuffix = tmpfilesuffix + str(myhash.hexdigest())
        if(sleep<0):
            sleep = geturls_download_sleep
        urlparts = urlparse.urlparse(httpurl)
        if(isinstance(httpheaders, list)):
            httpheaders = make_http_headers_from_list_to_dict(httpheaders)
        httpheaders = fix_header_names(httpheaders)
        if(isinstance(httpheaders, dict)):
            httpheaders = make_http_headers_from_dict_to_list(httpheaders)
        time.sleep(sleep)
        geturls_text = download_file_from_pysftp_file(httpurl)
        if(not geturls_text):
            return False
        # Size the in-memory download by seeking to its end.
        geturls_text.seek(0, 2)
        downloadsize = geturls_text.tell()
        geturls_text.seek(0, 0)
        if(downloadsize is not None):
            downloadsize = int(downloadsize)
        if downloadsize is None: downloadsize = 0
        fulldatasize = 0
        prevdownsize = 0
        log.info("Downloading URL "+httpurl)
        with tempfile.NamedTemporaryFile('wb+', prefix=tmpfileprefix, suffix=newtmpfilesuffix, delete=False) as f:
            tmpfilename = f.name
            returnval = {'Type': "File", 'Filename': tmpfilename, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'Headers': None, 'Version': None, 'Method': None, 'HeadersSent': None, 'URL': httpurl, 'Code': None}
            while True:
                databytes = geturls_text.read(buffersize)
                if not databytes: break
                datasize = len(databytes)
                fulldatasize = datasize + fulldatasize
                percentage = ""
                if(downloadsize>0):
                    percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%"
                downloaddiff = fulldatasize - prevdownsize
                log.info("Downloading "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Downloaded "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix'])
                prevdownsize = fulldatasize
                f.write(databytes)
            f.close()
        geturls_text.close()
        exec_time_end = time.time()
        # BUGFIX: elapsed time is end - start (was logged negative).
        log.info("It took "+hms_string(exec_time_end - exec_time_start)+" to download file.")
        returnval.update({'Filesize': os.path.getsize(tmpfilename), 'DownloadTime': float(exec_time_end - exec_time_start), 'DownloadTimeReadable': hms_string(exec_time_end - exec_time_start)})
        return returnval
if(not havepysftp):
    def download_from_url_file_with_pysftp(httpurl, httpheaders=geturls_headers, httpcookie=geturls_cj, httpmethod="GET", postdata=None, buffersize=524288, sleep=-1):
        """Fallback stub installed when the pysftp module could not be imported; always reports failure."""
        return False;
if(havepysftp):
    def download_from_url_to_file_with_pysftp(httpurl, httpheaders=geturls_headers, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        """
        Download httpurl over SFTP (via pysftp) and deliver the result.

        When outfile is a filename, the temporary download is moved to
        outpath/outfile and a dict describing the saved file is returned.
        When outfile is "-", the downloaded bytes are returned inline in
        the dict under 'Content' instead.  Returns False on any failure.

        buffersize is a two-item list: [download buffer, copy buffer].
        (Mutable default kept for interface compatibility; it is only read,
        never mutated.)
        """
        global geturls_download_sleep;
        if(sleep<0):
            sleep = geturls_download_sleep;
        if(not outfile=="-"):
            # Saving to a real file: normalize the destination and refuse
            # targets that cannot hold a regular file.
            outpath = outpath.rstrip(os.path.sep);
            filepath = os.path.realpath(outpath+os.path.sep+outfile);
            if(not os.path.exists(outpath)):
                os.makedirs(outpath);
            if(os.path.exists(outpath) and os.path.isfile(outpath)):
                return False;
            if(os.path.exists(filepath) and os.path.isdir(filepath)):
                return False;
            pretmpfilename = download_from_url_file_with_pysftp(httpurl, httpheaders, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            log.info("Moving file "+tmpfilename+" to "+filepath);
            exec_time_start = time.time();
            shutil.move(tmpfilename, filepath);
            exec_time_end = time.time();
            # NOTE(review): start - end yields a negative duration; kept as-is
            # because this timing convention is used throughout the file.
            log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to move file.");
            if(os.path.exists(tmpfilename)):
                os.remove(tmpfilename);
            # Fixed: the original dict listed the 'Method' key twice, so the
            # later 'Method': None silently clobbered pretmpfilename['Method'].
            returnval = {'Type': "File", 'Filename': filepath, 'Filesize': downloadsize, 'FilesizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string(exec_time_start - exec_time_end), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': pretmpfilename['Method'], 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        if(outfile=="-" and sys.version[0]=="2"):
            # Python 2: spool the download through a StringIO and return the
            # content inline instead of leaving a file on disk.
            pretmpfilename = download_from_url_file_with_pysftp(httpurl, httpheaders, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            prevdownsize = 0;
            exec_time_start = time.time();
            with open(tmpfilename, 'rb') as ft:
                f = StringIO();
                while True:
                    databytes = ft.read(buffersize[1]);
                    if not databytes: break;
                    datasize = len(databytes);
                    fulldatasize = datasize + fulldatasize;
                    percentage = "";
                    if(downloadsize>0):
                        percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                    downloaddiff = fulldatasize - prevdownsize;
                    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                    prevdownsize = fulldatasize;
                    f.write(databytes);
                f.seek(0);
                fdata = f.getvalue();
                f.close();
            os.remove(tmpfilename);
            exec_time_end = time.time();
            log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to copy file.");
            # Fixed here too: single 'Method' key (was duplicated with None).
            returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string(exec_time_start - exec_time_end), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': pretmpfilename['Method'], 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        if(outfile=="-" and sys.version[0]>="3"):
            # Python 3: same as above but the spool buffer must hold bytes.
            pretmpfilename = download_from_url_file_with_pysftp(httpurl, httpheaders, httpcookie, httpmethod, postdata, buffersize[0], sleep);
            # Fixed: this branch was missing the failure check that the
            # Python 2 branch performs, crashing on a False downloader result.
            if(not pretmpfilename):
                return False;
            tmpfilename = pretmpfilename['Filename'];
            downloadsize = os.path.getsize(tmpfilename);
            fulldatasize = 0;
            prevdownsize = 0;
            exec_time_start = time.time();
            with open(tmpfilename, 'rb') as ft:
                f = BytesIO();
                while True:
                    databytes = ft.read(buffersize[1]);
                    if not databytes: break;
                    datasize = len(databytes);
                    fulldatasize = datasize + fulldatasize;
                    percentage = "";
                    if(downloadsize>0):
                        percentage = str("{0:.2f}".format(float(float(fulldatasize / downloadsize) * 100))).rstrip('0').rstrip('.')+"%";
                    downloaddiff = fulldatasize - prevdownsize;
                    log.info("Copying "+get_readable_size(fulldatasize, 2, "SI")['ReadableWithSuffix']+" / "+get_readable_size(downloadsize, 2, "SI")['ReadableWithSuffix']+" "+str(percentage)+" / Copied "+get_readable_size(downloaddiff, 2, "IEC")['ReadableWithSuffix']);
                    prevdownsize = fulldatasize;
                    f.write(databytes);
                f.seek(0);
                fdata = f.getvalue();
                f.close();
            os.remove(tmpfilename);
            exec_time_end = time.time();
            log.info("It took "+hms_string(exec_time_start - exec_time_end)+" to copy file.");
            returnval = {'Type': "Content", 'Content': fdata, 'Contentsize': downloadsize, 'ContentsizeAlt': {'IEC': get_readable_size(downloadsize, 2, "IEC"), 'SI': get_readable_size(downloadsize, 2, "SI")}, 'DownloadTime': pretmpfilename['DownloadTime'], 'DownloadTimeReadable': pretmpfilename['DownloadTimeReadable'], 'MoveFileTime': float(exec_time_start - exec_time_end), 'MoveFileTimeReadable': hms_string(exec_time_start - exec_time_end), 'Headers': pretmpfilename['Headers'], 'Version': pretmpfilename['Version'], 'Method': pretmpfilename['Method'], 'HeadersSent': pretmpfilename['HeadersSent'], 'URL': pretmpfilename['URL'], 'Code': pretmpfilename['Code']};
        return returnval;
if(not havepysftp):
    def download_from_url_to_file_with_pysftp(httpurl, httpheaders=geturls_headers, httpcookie=geturls_cj, httpmethod="GET", postdata=None, outfile="-", outpath=os.getcwd(), buffersize=[524288, 524288], sleep=-1):
        """Fallback stub installed when the pysftp module could not be imported; always reports failure."""
        return False;
if(havepysftp):
    def upload_file_to_pysftp_file(sftpfile, url):
        """
        Upload the file-like object sftpfile to the path named by an
        sftp:// url using pysftp.  On success the object is rewound to the
        start and returned; on any failure False is returned.
        """
        urlparts = urlparse.urlparse(url);
        # Only the sftp scheme is supported here.
        if(urlparts.scheme=="http" or urlparts.scheme=="https"):
            return False;
        if(urlparts.scheme!="sftp"):
            return False;
        if(urlparts.port is None):
            sftp_port = 22;
        else:
            sftp_port = urlparts.port;
        if(urlparts.username is not None):
            sftp_username = urlparts.username;
        else:
            sftp_username = "anonymous";
        if(urlparts.password is not None):
            sftp_password = urlparts.password;
        elif(urlparts.password is None and urlparts.username=="anonymous"):
            sftp_password = "anonymous";
        else:
            sftp_password = "";
        try:
            # Fixed: the original discarded the pysftp.Connection result and
            # then referenced undefined names "ssh"/"httpurl", and ignored
            # the computed anonymous-credential fallbacks above.  A
            # pysftp.Connection is itself the SFTP session (putfo/close).
            sftp = pysftp.Connection(urlparts.hostname, port=sftp_port, username=sftp_username, password=sftp_password);
        except paramiko.ssh_exception.SSHException:
            return False;
        except socket.gaierror:
            log.info("Error With URL "+url);
            return False;
        except socket.timeout:
            log.info("Error With URL "+url);
            return False;
        sftp.putfo(sftpfile, urlparts.path);
        sftp.close();
        sftpfile.seek(0, 0);
        return sftpfile;
else:
    def upload_file_to_pysftp_file(sftpfile, url):
        """Fallback stub when pysftp is unavailable; always reports failure."""
        return False;
if(havepysftp):
    def upload_file_to_pysftp_string(sftpstring, url):
        """
        Upload the byte string sftpstring to an sftp:// url via pysftp by
        wrapping it in a BytesIO.  Returns upload_file_to_pysftp_file()'s
        result (the uploaded file object, or False on failure).
        """
        sftpfileo = BytesIO(sftpstring);
        # Fixed: the original called nonexistent upload_file_to_pysftp_files()
        # with the undefined name "ftpfileo".
        sftpfile = upload_file_to_pysftp_file(sftpfileo, url);
        sftpfileo.close();
        return sftpfile;
else:
    # Fixed: the stub's signature now matches the real implementation
    # (it previously dropped the sftpstring parameter, so callers of the
    # no-pysftp path got a TypeError instead of False).
    def upload_file_to_pysftp_string(sftpstring, url):
        """Fallback stub when pysftp is unavailable; always reports failure."""
        return False;