1 """An extensible library for opening URLs using a variety of protocols
3 The simplest way to use this module is to call the urlopen function,
4 which accepts a string containing a URL or a Request object (described
5 below). It opens the URL and returns the results as a file-like
6 object; the returned object has some extra methods described below.
8 The OpenerDirector manages a collection of Handler objects that do
9 all the actual work. Each Handler implements a particular protocol or
10 option. The OpenerDirector is a composite object that invokes the
11 Handlers needed to open the requested URL. For example, the
12 HTTPHandler performs HTTP GET and POST requests and deals with
13 non-error returns. The HTTPRedirectHandler automatically deals with
14 HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler
15 deals with digest authentication.
17 urlopen(url, data=None) -- Basic usage is the same as original
18 urllib. pass the url and optionally data to post to an HTTP URL, and
19 get a file-like object back. One difference is that you can also pass
20 a Request instance instead of URL. Raises a URLError (subclass of
21 IOError); for HTTP errors, raises an HTTPError, which can also be
22 treated as a valid response.
24 build_opener -- Function that creates a new OpenerDirector instance.
25 Will install the default handlers. Accepts one or more Handlers as
26 arguments, either instances or Handler classes that it will
27 instantiate. If one of the arguments is a subclass of the default
28 handler, the argument will be installed instead of the default.
30 install_opener -- Installs a new opener as the default opener.
35 Request -- An object that encapsulates the state of a request. The
36 state can be as simple as the URL. It can also include extra HTTP
37 headers, e.g. a User-Agent.
42 URLError -- A subclass of IOError, individual protocols have their own
45 HTTPError -- Also a valid HTTP response, so you can treat an HTTP error
46 as an exceptional event or valid response.
49 BaseHandler and parent
50 _call_chain conventions
56 # set up authentication info
57 authinfo = urllib2.HTTPBasicAuthHandler()
58 authinfo.add_password(realm='PDQ Application',
59 uri='https://mahler:8092/site-updates.py',
61 passwd='geheim$parole')
63 proxy_support = urllib2.ProxyHandler({"http" : "http://ahad-haam:3128"})
65 # build a new opener that adds authentication and caching FTP handlers
66 opener = urllib2.build_opener(proxy_support, authinfo, urllib2.CacheFTPHandler)
69 urllib2.install_opener(opener)
71 f = urllib2.urlopen('http://www.python.org/')
77 # If an authentication error handler that tries to perform
78 # authentication for some reason but fails, how should the error be
79 # signalled? The client needs to know the HTTP error code. But if
80 # the handler knows what the problem was, e.g., that it didn't know
81 # the hash algorithm requested in the challenge, it would be good to
82 # pass that information along to the client, too.
83 # ftp errors aren't handled cleanly
84 # check digest against correct (i.e. non-apache) implementation
86 # Possible extensions:
87 # complex proxies XXX not sure what exactly was meant by this
88 # abstract factory for opener
105 from cStringIO
import StringIO
107 from StringIO
import StringIO
109 from urllib
import (unwrap
, unquote
, splittype
, splithost
, quote
,
110 addinfourl
, splitport
, splitquery
,
111 splitattr
, ftpwrapper
, noheaders
, splituser
, splitpasswd
, splitvalue
)
113 # support for FileHandler, proxies via environment variables
114 from urllib
import localhost
, url2pathname
, getproxies
# used in User-Agent header sent
# (the first three characters of sys.version, e.g. "2.6")
__version__ = sys.version[:3]
120 def urlopen(url
, data
=None, timeout
=None):
123 _opener
= build_opener()
124 return _opener
.open(url
, data
, timeout
)
126 def install_opener(opener
):
130 # do these error classes make sense?
131 # make sure all of the IOError stuff is overridden. we just want to be
134 class URLError(IOError):
135 # URLError is a sub-type of IOError, but it doesn't share any of
136 # the implementation. need to override __init__ and __str__.
137 # It sets self.args for compatibility with other EnvironmentError
138 # subclasses, but args doesn't have the typical format with errno in
139 # slot 0 and strerror in slot 1. This may be better than nothing.
140 def __init__(self
, reason
):
145 return '<urlopen error %s>' % self
.reason
147 class HTTPError(URLError
, addinfourl
):
148 """Raised when HTTP error occurs, but also acts like non-error return"""
149 __super_init
= addinfourl
.__init
__
151 def __init__(self
, url
, code
, msg
, hdrs
, fp
):
157 # The addinfourl classes depend on fp being a valid file
158 # object. In some cases, the HTTPError may not have a valid
159 # file object. If this happens, the simplest workaround is to
160 # not initialize the base classes.
162 self
.__super
_init
(fp
, hdrs
, url
)
165 return 'HTTP Error %s: %s' % (self
.code
, self
.msg
)
# copied from cookielib.py
# Matches a trailing ":<port>" on a "host:port" string; request_host()
# uses it to strip the port number before comparing hosts.
_cut_port_re = re.compile(r":\d+$")
169 def request_host(request
):
170 """Return request-host, as defined by RFC 2965.
172 Variation from RFC: returned value is lowercased, for convenient
176 url
= request
.get_full_url()
177 host
= urlparse
.urlparse(url
)[1]
179 host
= request
.get_header("Host", "")
181 # remove port, if present
182 host
= _cut_port_re
.sub("", host
, 1)
187 def __init__(self
, url
, data
=None, headers
={},
188 origin_req_host
=None, unverifiable
=False):
189 # unwrap('<URL:type://host/path>') --> 'type://host/path'
190 self
.__original
= unwrap(url
)
192 # self.__r_type is what's left after doing the splittype
197 for key
, value
in headers
.items():
198 self
.add_header(key
, value
)
199 self
.unredirected_hdrs
= {}
200 if origin_req_host
is None:
201 origin_req_host
= request_host(self
)
202 self
.origin_req_host
= origin_req_host
203 self
.unverifiable
= unverifiable
205 def __getattr__(self
, attr
):
206 # XXX this is a fallback mechanism to guard against these
207 # methods getting called in a non-standard order. this may be
208 # too complicated and/or unnecessary.
209 # XXX should the __r_XXX attributes be public?
210 if attr
[:12] == '_Request__r_':
212 if hasattr(Request
, 'get_' + name
):
213 getattr(self
, 'get_' + name
)()
214 return getattr(self
, attr
)
215 raise AttributeError, attr
217 def get_method(self
):
223 # XXX these helper methods are lame
225 def add_data(self
, data
):
229 return self
.data
is not None
234 def get_full_url(self
):
235 return self
.__original
238 if self
.type is None:
239 self
.type, self
.__r
_type
= splittype(self
.__original
)
240 if self
.type is None:
241 raise ValueError, "unknown url type: %s" % self
.__original
245 if self
.host
is None:
246 self
.host
, self
.__r
_host
= splithost(self
.__r
_type
)
248 self
.host
= unquote(self
.host
)
251 def get_selector(self
):
254 def set_proxy(self
, host
, type):
255 self
.host
, self
.type = host
, type
256 self
.__r
_host
= self
.__original
258 def get_origin_req_host(self
):
259 return self
.origin_req_host
261 def is_unverifiable(self
):
262 return self
.unverifiable
264 def add_header(self
, key
, val
):
265 # useful for something like authentication
266 self
.headers
[key
.capitalize()] = val
268 def add_unredirected_header(self
, key
, val
):
269 # will not be added to a redirected request
270 self
.unredirected_hdrs
[key
.capitalize()] = val
272 def has_header(self
, header_name
):
273 return (header_name
in self
.headers
or
274 header_name
in self
.unredirected_hdrs
)
276 def get_header(self
, header_name
, default
=None):
277 return self
.headers
.get(
279 self
.unredirected_hdrs
.get(header_name
, default
))
281 def header_items(self
):
282 hdrs
= self
.unredirected_hdrs
.copy()
283 hdrs
.update(self
.headers
)
286 class OpenerDirector
:
288 client_version
= "Python-urllib/%s" % __version__
289 self
.addheaders
= [('User-agent', client_version
)]
290 # manage the individual handlers
292 self
.handle_open
= {}
293 self
.handle_error
= {}
294 self
.process_response
= {}
295 self
.process_request
= {}
297 def add_handler(self
, handler
):
298 if not hasattr(handler
, "add_parent"):
299 raise TypeError("expected BaseHandler instance, got %r" %
303 for meth
in dir(handler
):
304 if meth
in ["redirect_request", "do_open", "proxy_open"]:
305 # oops, coincidental match
310 condition
= meth
[i
+1:]
312 if condition
.startswith("error"):
313 j
= condition
.find("_") + i
+ 1
319 lookup
= self
.handle_error
.get(protocol
, {})
320 self
.handle_error
[protocol
] = lookup
321 elif condition
== "open":
323 lookup
= self
.handle_open
324 elif condition
== "response":
326 lookup
= self
.process_response
327 elif condition
== "request":
329 lookup
= self
.process_request
333 handlers
= lookup
.setdefault(kind
, [])
335 bisect
.insort(handlers
, handler
)
337 handlers
.append(handler
)
341 # the handlers must work in an specific order, the order
342 # is specified in a Handler attribute
343 bisect
.insort(self
.handlers
, handler
)
344 handler
.add_parent(self
)
347 # Only exists for backwards compatibility.
350 def _call_chain(self
, chain
, kind
, meth_name
, *args
):
351 # Handlers raise an exception if no one else should try to handle
352 # the request, or return None if they can't but another handler
353 # could. Otherwise, they return the response.
354 handlers
= chain
.get(kind
, ())
355 for handler
in handlers
:
356 func
= getattr(handler
, meth_name
)
359 if result
is not None:
362 def open(self
, fullurl
, data
=None, timeout
=None):
363 # accept a URL or a Request object
364 if isinstance(fullurl
, basestring
):
365 req
= Request(fullurl
, data
)
371 req
.timeout
= timeout
372 protocol
= req
.get_type()
374 # pre-process request
375 meth_name
= protocol
+"_request"
376 for processor
in self
.process_request
.get(protocol
, []):
377 meth
= getattr(processor
, meth_name
)
380 response
= self
._open
(req
, data
)
382 # post-process response
383 meth_name
= protocol
+"_response"
384 for processor
in self
.process_response
.get(protocol
, []):
385 meth
= getattr(processor
, meth_name
)
386 response
= meth(req
, response
)
390 def _open(self
, req
, data
=None):
391 result
= self
._call
_chain
(self
.handle_open
, 'default',
396 protocol
= req
.get_type()
397 result
= self
._call
_chain
(self
.handle_open
, protocol
, protocol
+
402 return self
._call
_chain
(self
.handle_open
, 'unknown',
405 def error(self
, proto
, *args
):
406 if proto
in ('http', 'https'):
407 # XXX http[s] protocols are special-cased
408 dict = self
.handle_error
['http'] # https is not different than http
409 proto
= args
[2] # YUCK!
410 meth_name
= 'http_error_%s' % proto
414 dict = self
.handle_error
415 meth_name
= proto
+ '_error'
417 args
= (dict, proto
, meth_name
) + args
418 result
= self
._call
_chain
(*args
)
423 args
= (dict, 'default', 'http_error_default') + orig_args
424 return self
._call
_chain
(*args
)
426 # XXX probably also want an abstract factory that knows when it makes
427 # sense to skip a superclass in favor of a subclass and when it might
428 # make sense to include both
430 def build_opener(*handlers
):
431 """Create an opener object from a list of handlers.
433 The opener will use several default handlers, including support
436 If any of the handlers passed as arguments are subclasses of the
437 default handlers, the default handlers will not be used.
441 return isinstance(obj
, types
.ClassType
) or hasattr(obj
, "__bases__")
443 opener
= OpenerDirector()
444 default_classes
= [ProxyHandler
, UnknownHandler
, HTTPHandler
,
445 HTTPDefaultErrorHandler
, HTTPRedirectHandler
,
446 FTPHandler
, FileHandler
, HTTPErrorProcessor
]
447 if hasattr(httplib
, 'HTTPS'):
448 default_classes
.append(HTTPSHandler
)
450 for klass
in default_classes
:
451 for check
in handlers
:
453 if issubclass(check
, klass
):
455 elif isinstance(check
, klass
):
458 default_classes
.remove(klass
)
460 for klass
in default_classes
:
461 opener
.add_handler(klass())
466 opener
.add_handler(h
)
472 def add_parent(self
, parent
):
476 # Only exists for backwards compatibility
479 def __lt__(self
, other
):
480 if not hasattr(other
, "handler_order"):
481 # Try to preserve the old behavior of having custom classes
482 # inserted after default ones (works only for custom user
483 # classes which are not aware of handler_order).
485 return self
.handler_order
< other
.handler_order
488 class HTTPErrorProcessor(BaseHandler
):
489 """Process HTTP error responses."""
490 handler_order
= 1000 # after all other processing
492 def http_response(self
, request
, response
):
493 code
, msg
, hdrs
= response
.code
, response
.msg
, response
.info()
495 # According to RFC 2616, "2xx" code indicates that the client's
496 # request was successfully received, understood, and accepted.
497 if not (200 <= code
< 300):
498 response
= self
.parent
.error(
499 'http', request
, response
, code
, msg
, hdrs
)
503 https_response
= http_response
class HTTPDefaultErrorHandler(BaseHandler):
    """Last-resort handler: turn any unhandled HTTP error into an HTTPError."""

    def http_error_default(self, req, fp, code, msg, hdrs):
        url = req.get_full_url()
        raise HTTPError(url, code, msg, hdrs, fp)
509 class HTTPRedirectHandler(BaseHandler
):
510 # maximum number of redirections to any single URL
511 # this is needed because of the state that cookies introduce
513 # maximum total number of redirections (regardless of URL) before
514 # assuming we're in a loop
515 max_redirections
= 10
517 def redirect_request(self
, req
, fp
, code
, msg
, headers
, newurl
):
518 """Return a Request or None in response to a redirect.
520 This is called by the http_error_30x methods when a
521 redirection response is received. If a redirection should
522 take place, return a new Request to allow http_error_30x to
523 perform the redirect. Otherwise, raise HTTPError if no-one
524 else should try to handle this url. Return None if you can't
525 but another Handler might.
528 if (code
in (301, 302, 303, 307) and m
in ("GET", "HEAD")
529 or code
in (301, 302, 303) and m
== "POST"):
530 # Strictly (according to RFC 2616), 301 or 302 in response
531 # to a POST MUST NOT cause a redirection without confirmation
532 # from the user (of urllib2, in this case). In practice,
533 # essentially all clients do redirect in this case, so we
535 # be conciliant with URIs containing a space
536 newurl
= newurl
.replace(' ', '%20')
537 return Request(newurl
,
539 origin_req_host
=req
.get_origin_req_host(),
542 raise HTTPError(req
.get_full_url(), code
, msg
, headers
, fp
)
544 # Implementation note: To avoid the server sending us into an
545 # infinite loop, the request object needs to track what URLs we
546 # have already seen. Do this by adding a handler-specific
547 # attribute to the Request object.
548 def http_error_302(self
, req
, fp
, code
, msg
, headers
):
549 # Some servers (incorrectly) return multiple Location headers
550 # (so probably same goes for URI). Use first header.
551 if 'location' in headers
:
552 newurl
= headers
.getheaders('location')[0]
553 elif 'uri' in headers
:
554 newurl
= headers
.getheaders('uri')[0]
557 newurl
= urlparse
.urljoin(req
.get_full_url(), newurl
)
559 # XXX Probably want to forget about the state of the current
560 # request, although that might interact poorly with other
561 # handlers that also use handler-specific request attributes
562 new
= self
.redirect_request(req
, fp
, code
, msg
, headers
, newurl
)
567 # .redirect_dict has a key url if url was previously visited.
568 if hasattr(req
, 'redirect_dict'):
569 visited
= new
.redirect_dict
= req
.redirect_dict
570 if (visited
.get(newurl
, 0) >= self
.max_repeats
or
571 len(visited
) >= self
.max_redirections
):
572 raise HTTPError(req
.get_full_url(), code
,
573 self
.inf_msg
+ msg
, headers
, fp
)
575 visited
= new
.redirect_dict
= req
.redirect_dict
= {}
576 visited
[newurl
] = visited
.get(newurl
, 0) + 1
578 # Don't close the fp until we are sure that we won't use it
583 return self
.parent
.open(new
)
585 http_error_301
= http_error_303
= http_error_307
= http_error_302
587 inf_msg
= "The HTTP server returned a redirect error that would " \
588 "lead to an infinite loop.\n" \
589 "The last 30x error message was:\n"
592 def _parse_proxy(proxy
):
593 """Return (scheme, user, password, host/port) given a URL or an authority.
595 If a URL is supplied, it must have an authority (host:port) component.
596 According to RFC 3986, having an authority component means the URL must
597 have two slashes after the scheme:
599 >>> _parse_proxy('file:/ftp.example.com/')
600 Traceback (most recent call last):
601 ValueError: proxy URL with no authority: 'file:/ftp.example.com/'
603 The first three items of the returned tuple may be None.
605 Examples of authority parsing:
607 >>> _parse_proxy('proxy.example.com')
608 (None, None, None, 'proxy.example.com')
609 >>> _parse_proxy('proxy.example.com:3128')
610 (None, None, None, 'proxy.example.com:3128')
612 The authority component may optionally include userinfo (assumed to be
615 >>> _parse_proxy('joe:password@proxy.example.com')
616 (None, 'joe', 'password', 'proxy.example.com')
617 >>> _parse_proxy('joe:password@proxy.example.com:3128')
618 (None, 'joe', 'password', 'proxy.example.com:3128')
620 Same examples, but with URLs instead:
622 >>> _parse_proxy('http://proxy.example.com/')
623 ('http', None, None, 'proxy.example.com')
624 >>> _parse_proxy('http://proxy.example.com:3128/')
625 ('http', None, None, 'proxy.example.com:3128')
626 >>> _parse_proxy('http://joe:password@proxy.example.com/')
627 ('http', 'joe', 'password', 'proxy.example.com')
628 >>> _parse_proxy('http://joe:password@proxy.example.com:3128')
629 ('http', 'joe', 'password', 'proxy.example.com:3128')
631 Everything after the authority is ignored:
633 >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128')
634 ('ftp', 'joe', 'password', 'proxy.example.com')
636 Test for no trailing '/' case:
638 >>> _parse_proxy('http://joe:password@proxy.example.com')
639 ('http', 'joe', 'password', 'proxy.example.com')
642 scheme
, r_scheme
= splittype(proxy
)
643 if not r_scheme
.startswith("/"):
649 if not r_scheme
.startswith("//"):
650 raise ValueError("proxy URL with no authority: %r" % proxy
)
651 # We have an authority, so for RFC 3986-compliant URLs (by ss 3.
652 # and 3.3.), path is empty or starts with '/'
653 end
= r_scheme
.find("/", 2)
656 authority
= r_scheme
[2:end
]
657 userinfo
, hostport
= splituser(authority
)
658 if userinfo
is not None:
659 user
, password
= splitpasswd(userinfo
)
661 user
= password
= None
662 return scheme
, user
, password
, hostport
664 class ProxyHandler(BaseHandler
):
665 # Proxies must be in front
668 def __init__(self
, proxies
=None):
670 proxies
= getproxies()
671 assert hasattr(proxies
, 'has_key'), "proxies must be a mapping"
672 self
.proxies
= proxies
673 for type, url
in proxies
.items():
674 setattr(self
, '%s_open' % type,
675 lambda r
, proxy
=url
, type=type, meth
=self
.proxy_open
: \
676 meth(r
, proxy
, type))
678 def proxy_open(self
, req
, proxy
, type):
679 orig_type
= req
.get_type()
680 proxy_type
, user
, password
, hostport
= _parse_proxy(proxy
)
681 if proxy_type
is None:
682 proxy_type
= orig_type
683 if user
and password
:
684 user_pass
= '%s:%s' % (unquote(user
), unquote(password
))
685 creds
= base64
.b64encode(user_pass
).strip()
686 req
.add_header('Proxy-authorization', 'Basic ' + creds
)
687 hostport
= unquote(hostport
)
688 req
.set_proxy(hostport
, proxy_type
)
689 if orig_type
== proxy_type
:
690 # let other handlers take care of it
693 # need to start over, because the other handlers don't
694 # grok the proxy's URL type
695 # e.g. if we have a constructor arg proxies like so:
696 # {'http': 'ftp://proxy.example.com'}, we may end up turning
697 # a request for http://acme.example.com/a into one for
698 # ftp://proxy.example.com/a
699 return self
.parent
.open(req
)
701 class HTTPPasswordMgr
:
706 def add_password(self
, realm
, uri
, user
, passwd
):
707 # uri could be a single URI or a sequence
708 if isinstance(uri
, basestring
):
710 if not realm
in self
.passwd
:
711 self
.passwd
[realm
] = {}
712 for default_port
in True, False:
714 [self
.reduce_uri(u
, default_port
) for u
in uri
])
715 self
.passwd
[realm
][reduced_uri
] = (user
, passwd
)
717 def find_user_password(self
, realm
, authuri
):
718 domains
= self
.passwd
.get(realm
, {})
719 for default_port
in True, False:
720 reduced_authuri
= self
.reduce_uri(authuri
, default_port
)
721 for uris
, authinfo
in domains
.iteritems():
723 if self
.is_suburi(uri
, reduced_authuri
):
727 def reduce_uri(self
, uri
, default_port
=True):
728 """Accept authority or URI and extract only the authority and path."""
729 # note HTTP URLs do not have a userinfo component
730 parts
= urlparse
.urlsplit(uri
)
735 path
= parts
[2] or '/'
741 host
, port
= splitport(authority
)
742 if default_port
and port
is None and scheme
is not None:
746 if dport
is not None:
747 authority
= "%s:%d" % (host
, dport
)
748 return authority
, path
750 def is_suburi(self
, base
, test
):
751 """Check if test is below base in a URI tree
753 Both args must be URIs in reduced form.
757 if base
[0] != test
[0]:
759 common
= posixpath
.commonprefix((base
[1], test
[1]))
760 if len(common
) == len(base
[1]):
765 class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr
):
767 def find_user_password(self
, realm
, authuri
):
768 user
, password
= HTTPPasswordMgr
.find_user_password(self
, realm
,
771 return user
, password
772 return HTTPPasswordMgr
.find_user_password(self
, None, authuri
)
775 class AbstractBasicAuthHandler
:
777 # XXX this allows for multiple auth-schemes, but will stupidly pick
778 # the last one with a realm specified.
780 rx
= re
.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', re
.I
)
782 # XXX could pre-emptively send auth info already accepted (RFC 2617,
783 # end of section 2, and section 1.2 immediately after "credentials"
786 def __init__(self
, password_mgr
=None):
787 if password_mgr
is None:
788 password_mgr
= HTTPPasswordMgr()
789 self
.passwd
= password_mgr
790 self
.add_password
= self
.passwd
.add_password
792 def http_error_auth_reqed(self
, authreq
, host
, req
, headers
):
793 # host may be an authority (without userinfo) or a URL with an
795 # XXX could be multiple headers
796 authreq
= headers
.get(authreq
, None)
798 mo
= AbstractBasicAuthHandler
.rx
.search(authreq
)
800 scheme
, realm
= mo
.groups()
801 if scheme
.lower() == 'basic':
802 return self
.retry_http_basic_auth(host
, req
, realm
)
804 def retry_http_basic_auth(self
, host
, req
, realm
):
805 user
, pw
= self
.passwd
.find_user_password(realm
, host
)
807 raw
= "%s:%s" % (user
, pw
)
808 auth
= 'Basic %s' % base64
.b64encode(raw
).strip()
809 if req
.headers
.get(self
.auth_header
, None) == auth
:
811 req
.add_header(self
.auth_header
, auth
)
812 return self
.parent
.open(req
)
817 class HTTPBasicAuthHandler(AbstractBasicAuthHandler
, BaseHandler
):
819 auth_header
= 'Authorization'
821 def http_error_401(self
, req
, fp
, code
, msg
, headers
):
822 url
= req
.get_full_url()
823 return self
.http_error_auth_reqed('www-authenticate',
827 class ProxyBasicAuthHandler(AbstractBasicAuthHandler
, BaseHandler
):
829 auth_header
= 'Proxy-authorization'
def http_error_407(self, req, fp, code, msg, headers):
    """Retry the request with Proxy-Authorization credentials."""
    # http_error_auth_reqed requires that there is no userinfo component
    # in the authority; urllib2 does not (and should not, RFC 3986
    # s. 3.2.1) support URLs containing userinfo, so req.get_host() can
    # be passed through unmodified.
    host = req.get_host()
    return self.http_error_auth_reqed('proxy-authenticate', host, req, headers)
842 """Return n random bytes."""
843 # Use /dev/urandom if it is available. Fall back to random module
844 # if not. It might be worthwhile to extend this function to use
845 # other platform-specific mechanisms for getting random bytes.
846 if os
.path
.exists("/dev/urandom"):
847 f
= open("/dev/urandom")
852 L
= [chr(random
.randrange(0, 256)) for i
in range(n
)]
855 class AbstractDigestAuthHandler
:
856 # Digest authentication is specified in RFC 2617.
858 # XXX The client does not inspect the Authentication-Info header
859 # in a successful response.
861 # XXX It should be possible to test this implementation against
862 # a mock server that just generates a static set of challenges.
864 # XXX qop="auth-int" supports is shaky
866 def __init__(self
, passwd
=None):
868 passwd
= HTTPPasswordMgr()
870 self
.add_password
= self
.passwd
.add_password
874 def reset_retry_count(self
):
877 def http_error_auth_reqed(self
, auth_header
, host
, req
, headers
):
878 authreq
= headers
.get(auth_header
, None)
880 # Don't fail endlessly - if we failed once, we'll probably
881 # fail a second time. Hm. Unless the Password Manager is
882 # prompting for the information. Crap. This isn't great
883 # but it's better than the current 'repeat until recursion
884 # depth exceeded' approach <wink>
885 raise HTTPError(req
.get_full_url(), 401, "digest auth failed",
890 scheme
= authreq
.split()[0]
891 if scheme
.lower() == 'digest':
892 return self
.retry_http_digest_auth(req
, authreq
)
894 def retry_http_digest_auth(self
, req
, auth
):
895 token
, challenge
= auth
.split(' ', 1)
896 chal
= parse_keqv_list(parse_http_list(challenge
))
897 auth
= self
.get_authorization(req
, chal
)
899 auth_val
= 'Digest %s' % auth
900 if req
.headers
.get(self
.auth_header
, None) == auth_val
:
902 req
.add_unredirected_header(self
.auth_header
, auth_val
)
903 resp
= self
.parent
.open(req
)
906 def get_cnonce(self
, nonce
):
907 # The cnonce-value is an opaque
908 # quoted string value provided by the client and used by both client
909 # and server to avoid chosen plaintext attacks, to provide mutual
910 # authentication, and to provide some message integrity protection.
911 # This isn't a fabulous effort, but it's probably Good Enough.
912 dig
= hashlib
.sha1("%s:%s:%s:%s" % (self
.nonce_count
, nonce
, time
.ctime(),
913 randombytes(8))).hexdigest()
916 def get_authorization(self
, req
, chal
):
918 realm
= chal
['realm']
919 nonce
= chal
['nonce']
920 qop
= chal
.get('qop')
921 algorithm
= chal
.get('algorithm', 'MD5')
922 # mod_digest doesn't send an opaque, even though it isn't
923 # supposed to be optional
924 opaque
= chal
.get('opaque', None)
928 H
, KD
= self
.get_algorithm_impls(algorithm
)
932 user
, pw
= self
.passwd
.find_user_password(realm
, req
.get_full_url())
936 # XXX not implemented yet
938 entdig
= self
.get_entity_digest(req
.get_data(), chal
)
942 A1
= "%s:%s:%s" % (user
, realm
, pw
)
943 A2
= "%s:%s" % (req
.get_method(),
944 # XXX selector: what about proxies and full urls
947 self
.nonce_count
+= 1
948 ncvalue
= '%08x' % self
.nonce_count
949 cnonce
= self
.get_cnonce(nonce
)
950 noncebit
= "%s:%s:%s:%s:%s" % (nonce
, ncvalue
, cnonce
, qop
, H(A2
))
951 respdig
= KD(H(A1
), noncebit
)
953 respdig
= KD(H(A1
), "%s:%s" % (nonce
, H(A2
)))
955 # XXX handle auth-int.
956 raise URLError("qop '%s' is not supported." % qop
)
958 # XXX should the partial digests be encoded too?
960 base
= 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
961 'response="%s"' % (user
, realm
, nonce
, req
.get_selector(),
964 base
+= ', opaque="%s"' % opaque
966 base
+= ', digest="%s"' % entdig
967 base
+= ', algorithm="%s"' % algorithm
969 base
+= ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue
, cnonce
)
972 def get_algorithm_impls(self
, algorithm
):
973 # lambdas assume digest modules are imported at the top level
974 if algorithm
== 'MD5':
975 H
= lambda x
: hashlib
.md5(x
).hexdigest()
976 elif algorithm
== 'SHA':
977 H
= lambda x
: hashlib
.sha1(x
).hexdigest()
979 KD
= lambda s
, d
: H("%s:%s" % (s
, d
))
982 def get_entity_digest(self
, data
, chal
):
983 # XXX not implemented yet
987 class HTTPDigestAuthHandler(BaseHandler
, AbstractDigestAuthHandler
):
988 """An authentication protocol defined by RFC 2069
990 Digest authentication improves on basic authentication because it
991 does not transmit passwords in the clear.
994 auth_header
= 'Authorization'
995 handler_order
= 490 # before Basic auth
997 def http_error_401(self
, req
, fp
, code
, msg
, headers
):
998 host
= urlparse
.urlparse(req
.get_full_url())[1]
999 retry
= self
.http_error_auth_reqed('www-authenticate',
1001 self
.reset_retry_count()
1005 class ProxyDigestAuthHandler(BaseHandler
, AbstractDigestAuthHandler
):
1007 auth_header
= 'Proxy-Authorization'
1008 handler_order
= 490 # before Basic auth
1010 def http_error_407(self
, req
, fp
, code
, msg
, headers
):
1011 host
= req
.get_host()
1012 retry
= self
.http_error_auth_reqed('proxy-authenticate',
1014 self
.reset_retry_count()
1017 class AbstractHTTPHandler(BaseHandler
):
def __init__(self, debuglevel=0):
    """Create the handler with an initial httplib debug level."""
    self._debuglevel = debuglevel
def set_http_debuglevel(self, level):
    """Change the debug level passed to httplib connections."""
    self._debuglevel = level
1025 def do_request_(self
, request
):
1026 host
= request
.get_host()
1028 raise URLError('no host given')
1030 if request
.has_data(): # POST
1031 data
= request
.get_data()
1032 if not request
.has_header('Content-type'):
1033 request
.add_unredirected_header(
1035 'application/x-www-form-urlencoded')
1036 if not request
.has_header('Content-length'):
1037 request
.add_unredirected_header(
1038 'Content-length', '%d' % len(data
))
1040 scheme
, sel
= splittype(request
.get_selector())
1041 sel_host
, sel_path
= splithost(sel
)
1042 if not request
.has_header('Host'):
1043 request
.add_unredirected_header('Host', sel_host
or host
)
1044 for name
, value
in self
.parent
.addheaders
:
1045 name
= name
.capitalize()
1046 if not request
.has_header(name
):
1047 request
.add_unredirected_header(name
, value
)
1051 def do_open(self
, http_class
, req
):
1052 """Return an addinfourl object for the request, using http_class.
1054 http_class must implement the HTTPConnection API from httplib.
1055 The addinfourl return value is a file-like object. It also
1056 has methods and attributes including:
1057 - info(): return a mimetools.Message object for the headers
1058 - geturl(): return the original request URL
1059 - code: HTTP status code
1061 host
= req
.get_host()
1063 raise URLError('no host given')
1065 h
= http_class(host
, timeout
=req
.timeout
) # will parse host:port
1066 h
.set_debuglevel(self
._debuglevel
)
1068 headers
= dict(req
.headers
)
1069 headers
.update(req
.unredirected_hdrs
)
1070 # We want to make an HTTP/1.1 request, but the addinfourl
1071 # class isn't prepared to deal with a persistent connection.
1072 # It will try to read all remaining data from the socket,
1073 # which will block while the server waits for the next request.
1074 # So make sure the connection gets closed after the (only)
1076 headers
["Connection"] = "close"
1078 (name
.title(), val
) for name
, val
in headers
.items())
1080 h
.request(req
.get_method(), req
.get_selector(), req
.data
, headers
)
1082 except socket
.error
, err
: # XXX what error?
1085 # Pick apart the HTTPResponse object to get the addinfourl
1086 # object initialized properly.
1088 # Wrap the HTTPResponse object in socket's file object adapter
1089 # for Windows. That adapter calls recv(), so delegate recv()
1090 # to read(). This weird wrapping allows the returned object to
1091 # have readline() and readlines() methods.
1093 # XXX It might be better to extract the read buffering code
1094 # out of socket._fileobject() and into a base class.
1097 fp
= socket
._fileobject
(r
, close
=True)
1099 resp
= addinfourl(fp
, r
.msg
, req
.get_full_url())
1100 resp
.code
= r
.status
class HTTPHandler(AbstractHTTPHandler):
    """Open plain http:// URLs via httplib.HTTPConnection."""

    def http_open(self, req):
        return self.do_open(httplib.HTTPConnection, req)

    http_request = AbstractHTTPHandler.do_request_
if hasattr(httplib, 'HTTPS'):
    # Only defined when httplib was built with SSL support.
    class HTTPSHandler(AbstractHTTPHandler):
        """Open https:// URLs via httplib.HTTPSConnection."""

        def https_open(self, req):
            return self.do_open(httplib.HTTPSConnection, req)

        https_request = AbstractHTTPHandler.do_request_
class HTTPCookieProcessor(BaseHandler):
    """Attach stored cookies to requests and harvest cookies from responses."""

    def __init__(self, cookiejar=None):
        # Imported lazily so the module loads even if cookie support is
        # never used.
        import cookielib
        if cookiejar is None:
            cookiejar = cookielib.CookieJar()
        self.cookiejar = cookiejar

    def http_request(self, request):
        # Add any matching cookies as Cookie: headers before sending.
        self.cookiejar.add_cookie_header(request)
        return request

    def http_response(self, request, response):
        # Record Set-Cookie headers from the server for later requests.
        self.cookiejar.extract_cookies(response, request)
        return response

    # Cookie handling is scheme-agnostic; reuse the same hooks for TLS.
    https_request = http_request
    https_response = http_response
class UnknownHandler(BaseHandler):
    """Last-resort handler: any scheme nobody else claimed is an error."""

    def unknown_open(self, req):
        scheme = req.get_type()
        raise URLError('unknown url type: %s' % scheme)
def parse_keqv_list(l):
    """Parse list of key=value strings where keys are not duplicated.

    Returns a dict mapping each key to its value; a value wrapped in
    double quotes has the surrounding quotes stripped.  The split is on
    the first '=' only, so values may themselves contain '='.

    Fix over the classic implementation: an element with an empty value
    (e.g. 'k=') no longer raises IndexError; it maps to ''.
    """
    parsed = {}
    for elt in l:
        k, v = elt.split('=', 1)
        # Slicing ([:1]/[-1:]) instead of indexing ([0]/[-1]) is safe on
        # an empty value; a lone '"' still strips to '' as before.
        if v[:1] == '"' and v[-1:] == '"':
            v = v[1:-1]
        parsed[k] = v
    return parsed
def parse_http_list(s):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  A non-quoted string could have quotes in the
    middle.  Neither commas nor quotes count if they are escaped.

    Only double-quotes count, not single-quotes.
    """
    res = []
    part = ''

    escape = quote = False
    for cur in s:
        if escape:
            # Previous char was a backslash inside a quoted-string:
            # take this one literally.
            part += cur
            escape = False
            continue
        if quote:
            if cur == '\\':
                escape = True
                continue
            elif cur == '"':
                quote = False
            part += cur
            continue

        if cur == ',':
            # An unquoted comma terminates the current element.
            res.append(part)
            part = ''
            continue

        if cur == '"':
            quote = True

        part += cur

    # append last part
    if part:
        res.append(part)

    return [part.strip() for part in res]
1196 class FileHandler(BaseHandler
):
1197 # Use local file or FTP depending on form of URL
1198 def file_open(self
, req
):
1199 url
= req
.get_selector()
1200 if url
[:2] == '//' and url
[2:3] != '/':
1202 return self
.parent
.open(req
)
1204 return self
.open_local_file(req
)
1206 # names for the localhost
1208 def get_names(self
):
1209 if FileHandler
.names
is None:
1211 FileHandler
.names
= (socket
.gethostbyname('localhost'),
1212 socket
.gethostbyname(socket
.gethostname()))
1213 except socket
.gaierror
:
1214 FileHandler
.names
= (socket
.gethostbyname('localhost'),)
1215 return FileHandler
.names
1217 # not entirely sure what the rules are here
1218 def open_local_file(self
, req
):
1221 host
= req
.get_host()
1222 file = req
.get_selector()
1223 localfile
= url2pathname(file)
1225 stats
= os
.stat(localfile
)
1226 size
= stats
.st_size
1227 modified
= email
.utils
.formatdate(stats
.st_mtime
, usegmt
=True)
1228 mtype
= mimetypes
.guess_type(file)[0]
1229 headers
= mimetools
.Message(StringIO(
1230 'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
1231 (mtype
or 'text/plain', size
, modified
)))
1233 host
, port
= splitport(host
)
1235 (not port
and socket
.gethostbyname(host
) in self
.get_names()):
1236 return addinfourl(open(localfile
, 'rb'),
1237 headers
, 'file:'+file)
1238 except OSError, msg
:
1239 # urllib2 users shouldn't expect OSErrors coming from urlopen()
1241 raise URLError('file not on local host')
1243 class FTPHandler(BaseHandler
):
1244 def ftp_open(self
, req
):
1247 host
= req
.get_host()
1249 raise URLError
, ('ftp error', 'no host given')
1250 host
, port
= splitport(host
)
1252 port
= ftplib
.FTP_PORT
1256 # username/password handling
1257 user
, host
= splituser(host
)
1259 user
, passwd
= splitpasswd(user
)
1262 host
= unquote(host
)
1263 user
= unquote(user
or '')
1264 passwd
= unquote(passwd
or '')
1267 host
= socket
.gethostbyname(host
)
1268 except socket
.error
, msg
:
1270 path
, attrs
= splitattr(req
.get_selector())
1271 dirs
= path
.split('/')
1272 dirs
= map(unquote
, dirs
)
1273 dirs
, file = dirs
[:-1], dirs
[-1]
1274 if dirs
and not dirs
[0]:
1277 fw
= self
.connect_ftp(user
, passwd
, host
, port
, dirs
, req
.timeout
)
1278 type = file and 'I' or 'D'
1280 attr
, value
= splitvalue(attr
)
1281 if attr
.lower() == 'type' and \
1282 value
in ('a', 'A', 'i', 'I', 'd', 'D'):
1283 type = value
.upper()
1284 fp
, retrlen
= fw
.retrfile(file, type)
1286 mtype
= mimetypes
.guess_type(req
.get_full_url())[0]
1288 headers
+= "Content-type: %s\n" % mtype
1289 if retrlen
is not None and retrlen
>= 0:
1290 headers
+= "Content-length: %d\n" % retrlen
1291 sf
= StringIO(headers
)
1292 headers
= mimetools
.Message(sf
)
1293 return addinfourl(fp
, headers
, req
.get_full_url())
1294 except ftplib
.all_errors
, msg
:
1295 raise URLError
, ('ftp error', msg
), sys
.exc_info()[2]
1297 def connect_ftp(self
, user
, passwd
, host
, port
, dirs
, timeout
):
1298 fw
= ftpwrapper(user
, passwd
, host
, port
, dirs
, timeout
)
1299 ## fw.ftp.set_debuglevel(1)
class CacheFTPHandler(FTPHandler):
    """FTPHandler that keeps a small pool of recently used connections.

    Connections are cached per (user, host, port, path, timeout) key and
    expire self.delay seconds after their last use.
    """
    # XXX would be nice to have pluggable cache strategies
    # XXX this stuff is definitely not thread safe

    def __init__(self):
        self.cache = {}      # key -> live ftpwrapper
        self.timeout = {}    # key -> absolute expiry time (epoch seconds)
        self.soonest = 0     # earliest expiry among cached entries
        self.delay = 60      # seconds an idle connection stays cached
        self.max_conns = 16  # hard cap on cached connections

    def setTimeout(self, t):
        self.delay = t

    def setMaxConns(self, m):
        self.max_conns = m

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        key = user, host, port, '/'.join(dirs), timeout
        if key in self.cache:
            # Cache hit: just refresh the expiry clock.
            self.timeout[key] = time.time() + self.delay
        else:
            self.cache[key] = ftpwrapper(user, passwd, host, port,
                                         dirs, timeout)
            self.timeout[key] = time.time() + self.delay
        self.check_cache()
        return self.cache[key]

    def check_cache(self):
        # first check for old ones
        t = time.time()
        if self.soonest <= t:
            # Python 2 .items() returns a list, so deleting entries
            # while looping over it is safe.
            for k, v in self.timeout.items():
                if v < t:
                    self.cache[k].close()
                    del self.cache[k]
                    del self.timeout[k]
        # Bug fix: min() raises ValueError on an empty sequence, which
        # happened whenever every cached connection had expired.
        if self.timeout:
            self.soonest = min(self.timeout.values())
        else:
            self.soonest = 0

        # then check the size
        if len(self.cache) == self.max_conns:
            # Evict the entry that would expire soonest.
            for k, v in self.timeout.items():
                if v == self.soonest:
                    del self.cache[k]
                    del self.timeout[k]
                    break
            if self.timeout:
                self.soonest = min(self.timeout.values())