1 """An extensible library for opening URLs using a variety of protocols
3 The simplest way to use this module is to call the urlopen function,
4 which accepts a string containing a URL or a Request object (described
5 below). It opens the URL and returns the results as file-like
6 object; the returned object has some extra methods described below.
8 The OpenerDirector manages a collection of Handler objects that do
9 all the actual work. Each Handler implements a particular protocol or
10 option. The OpenerDirector is a composite object that invokes the
11 Handlers needed to open the requested URL. For example, the
12 HTTPHandler performs HTTP GET and POST requests and deals with
13 non-error returns. The HTTPRedirectHandler automatically deals with
14 HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler
15 deals with digest authentication.
17 urlopen(url, data=None) -- Basic usage is the same as original
18 urllib. pass the url and optionally data to post to an HTTP URL, and
19 get a file-like object back. One difference is that you can also pass
20 a Request instance instead of URL. Raises a URLError (subclass of
21 IOError); for HTTP errors, raises an HTTPError, which can also be
22 treated as a valid response.
24 build_opener -- Function that creates a new OpenerDirector instance.
25 Will install the default handlers. Accepts one or more Handlers as
26 arguments, either instances or Handler classes that it will
27 instantiate. If one of the arguments is a subclass of the default
28 handler, the argument will be installed instead of the default.
30 install_opener -- Installs a new opener as the default opener.
35 Request -- An object that encapsulates the state of a request. The
36 state can be as simple as the URL. It can also include extra HTTP
37 headers, e.g. a User-Agent.
42 URLError -- A subclass of IOError, individual protocols have their own
45 HTTPError -- Also a valid HTTP response, so you can treat an HTTP error
46 as an exceptional event or valid response.
49 BaseHandler and parent
50 _call_chain conventions
56 # set up authentication info
57 authinfo = urllib2.HTTPBasicAuthHandler()
58 authinfo.add_password(realm='PDQ Application',
59 uri='https://mahler:8092/site-updates.py',
61 passwd='geheim$parole')
63 proxy_support = urllib2.ProxyHandler({"http" : "http://ahad-haam:3128"})
65 # build a new opener that adds authentication and caching FTP handlers
66 opener = urllib2.build_opener(proxy_support, authinfo, urllib2.CacheFTPHandler)
69 urllib2.install_opener(opener)
71 f = urllib2.urlopen('http://www.python.org/')
77 # If an authentication error handler that tries to perform
78 # authentication for some reason but fails, how should the error be
79 # signalled? The client needs to know the HTTP error code. But if
80 # the handler knows that the problem was, e.g., that it didn't know
81 # the hash algorithm requested in the challenge, it would be good to
82 # pass that information along to the client, too.
83 # ftp errors aren't handled cleanly
84 # check digest against correct (i.e. non-apache) implementation
86 # Possible extensions:
87 # complex proxies XXX not sure what exactly was meant by this
88 # abstract factory for opener
# Prefer the C implementation of StringIO; fall back to the pure-Python one.
try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

from urllib import (unwrap, unquote, splittype, splithost, quote,
    addinfourl, splitport, splitattr,
    ftpwrapper, splituser, splitpasswd, splitvalue)

# support for FileHandler, proxies via environment variables
from urllib import localhost, url2pathname, getproxies

# used in User-Agent header sent
__version__ = sys.version[:3]
# Module-level default opener, created lazily by urlopen() and
# replaceable via install_opener().
_opener = None

def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
    """Open url (a string or Request object) and return a file-like object.

    data, if given, is sent to the server (e.g. as an HTTP POST body).
    Uses the installed module-level opener, building the default one on
    first use.
    """
    global _opener
    # Build the default opener only once and cache it at module level.
    if _opener is None:
        _opener = build_opener()
    return _opener.open(url, data, timeout)
def install_opener(opener):
    """Install opener as the module-level default opener used by urlopen()."""
    global _opener
    _opener = opener
130 # do these error classes make sense?
131 # make sure all of the IOError stuff is overridden. we just want to be
class URLError(IOError):
    """Error raised when a URL cannot be opened.

    URLError is a sub-type of IOError, but it doesn't share any of
    the implementation.  need to override __init__ and __str__.
    It sets self.args for compatibility with other EnvironmentError
    subclasses, but args doesn't have the typical format with errno in
    slot 0 and strerror in slot 1.  This may be better than nothing.
    """
    def __init__(self, reason):
        # reason is typically a string or an exception instance.
        self.args = reason,
        self.reason = reason

    def __str__(self):
        return '<urlopen error %s>' % self.reason
class HTTPError(URLError, addinfourl):
    """Raised when HTTP error occurs, but also acts like non-error return"""
    __super_init = addinfourl.__init__

    def __init__(self, url, code, msg, hdrs, fp):
        # code: HTTP status code; msg: reason phrase; hdrs: response
        # headers; fp: file object for the response body (may be None).
        self.code = code
        self.msg = msg
        self.hdrs = hdrs
        self.fp = fp
        self.filename = url
        # The addinfourl classes depend on fp being a valid file
        # object.  In some cases, the HTTPError may not have a valid
        # file object.  If this happens, the simplest workaround is to
        # not initialize the base classes.
        if fp is not None:
            self.__super_init(fp, hdrs, url, code)

    def __str__(self):
        return 'HTTP Error %s: %s' % (self.code, self.msg)
# copied from cookielib.py
_cut_port_re = re.compile(r":\d+$")
def request_host(request):
    """Return request-host, as defined by RFC 2965.

    Variation from RFC: returned value is lowercased, for convenient
    comparison.
    """
    url = request.get_full_url()
    host = urlparse.urlparse(url)[1]
    # Fall back to the Host header when the URL carries no authority.
    if host == "":
        host = request.get_header("Host", "")

    # remove port, if present
    host = _cut_port_re.sub("", host, 1)
    return host.lower()
class Request:
    """Encapsulates the state of a single URL request (URL, data, headers)."""

    def __init__(self, url, data=None, headers={},
                 origin_req_host=None, unverifiable=False):
        # unwrap('<URL:type://host/path>') --> 'type://host/path'
        self.__original = unwrap(url)
        self.type = None
        # self.__r_type is what's left after doing the splittype
        self.host = None
        self.port = None
        self._tunnel_host = None
        self.data = data
        self.headers = {}
        for key, value in headers.items():
            self.add_header(key, value)
        self.unredirected_hdrs = {}
        if origin_req_host is None:
            origin_req_host = request_host(self)
        self.origin_req_host = origin_req_host
        self.unverifiable = unverifiable

    def __getattr__(self, attr):
        # XXX this is a fallback mechanism to guard against these
        # methods getting called in a non-standard order.  this may be
        # too complicated and/or unnecessary.
        # XXX should the __r_XXX attributes be public?
        if attr[:12] == '_Request__r_':
            name = attr[12:]
            if hasattr(Request, 'get_' + name):
                getattr(self, 'get_' + name)()
                return getattr(self, attr)
        raise AttributeError(attr)

    def get_method(self):
        # POST when there is a request body, GET otherwise.
        if self.has_data():
            return "POST"
        else:
            return "GET"

    # XXX these helper methods are lame

    def add_data(self, data):
        self.data = data

    def has_data(self):
        return self.data is not None

    def get_data(self):
        return self.data

    def get_full_url(self):
        return self.__original

    def get_type(self):
        # Lazily split and cache the scheme from the original URL.
        if self.type is None:
            self.type, self.__r_type = splittype(self.__original)
            if self.type is None:
                raise ValueError("unknown url type: %s" % self.__original)
        return self.type

    def get_host(self):
        # Lazily split and cache the host from the URL remainder.
        if self.host is None:
            self.host, self.__r_host = splithost(self.__r_type)
            if self.host:
                self.host = unquote(self.host)
        return self.host

    def get_selector(self):
        return self.__r_host

    def set_proxy(self, host, type):
        # For https, remember the real host so the connection can be
        # tunnelled (CONNECT) through the proxy.
        if self.type == 'https' and not self._tunnel_host:
            self._tunnel_host = self.host
        else:
            self.type = type
            self.__r_host = self.__original

        self.host = host

    def has_proxy(self):
        return self.__r_host == self.__original

    def get_origin_req_host(self):
        return self.origin_req_host

    def is_unverifiable(self):
        return self.unverifiable

    def add_header(self, key, val):
        # useful for something like authentication
        self.headers[key.capitalize()] = val

    def add_unredirected_header(self, key, val):
        # will not be added to a redirected request
        self.unredirected_hdrs[key.capitalize()] = val

    def has_header(self, header_name):
        return (header_name in self.headers or
                header_name in self.unredirected_hdrs)

    def get_header(self, header_name, default=None):
        # Normal headers take precedence over unredirected ones.
        return self.headers.get(
            header_name,
            self.unredirected_hdrs.get(header_name, default))

    def header_items(self):
        hdrs = self.unredirected_hdrs.copy()
        hdrs.update(self.headers)
        return hdrs.items()
class OpenerDirector:
    """Manages a chain of Handler objects and invokes them to open URLs."""

    def __init__(self):
        client_version = "Python-urllib/%s" % __version__
        self.addheaders = [('User-agent', client_version)]
        # manage the individual handlers
        self.handlers = []
        self.handle_open = {}
        self.handle_error = {}
        self.process_response = {}
        self.process_request = {}

    def add_handler(self, handler):
        """Register handler's *_open/*_error/*_request/*_response methods."""
        if not hasattr(handler, "add_parent"):
            raise TypeError("expected BaseHandler instance, got %r" %
                            type(handler))

        added = False
        for meth in dir(handler):
            if meth in ["redirect_request", "do_open", "proxy_open"]:
                # oops, coincidental match
                continue

            i = meth.find("_")
            protocol = meth[:i]
            condition = meth[i+1:]

            if condition.startswith("error"):
                j = condition.find("_") + i + 1
                kind = meth[j+1:]
                try:
                    kind = int(kind)
                except ValueError:
                    pass
                lookup = self.handle_error.get(protocol, {})
                self.handle_error[protocol] = lookup
            elif condition == "open":
                kind = protocol
                lookup = self.handle_open
            elif condition == "response":
                kind = protocol
                lookup = self.process_response
            elif condition == "request":
                kind = protocol
                lookup = self.process_request
            else:
                continue

            handlers = lookup.setdefault(kind, [])
            if handlers:
                bisect.insort(handlers, handler)
            else:
                handlers.append(handler)
            added = True

        if added:
            # the handlers must work in an specific order, the order
            # is specified in a Handler attribute
            bisect.insort(self.handlers, handler)
            handler.add_parent(self)

    def close(self):
        # Only exists for backwards compatibility.
        pass

    def _call_chain(self, chain, kind, meth_name, *args):
        # Handlers raise an exception if no one else should try to handle
        # the request, or return None if they can't but another handler
        # could.  Otherwise, they return the response.
        handlers = chain.get(kind, ())
        for handler in handlers:
            func = getattr(handler, meth_name)

            result = func(*args)
            if result is not None:
                return result

    def open(self, fullurl, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
        # accept a URL or a Request object
        if isinstance(fullurl, basestring):
            req = Request(fullurl, data)
        else:
            req = fullurl
            if data is not None:
                req.add_data(data)

        req.timeout = timeout
        protocol = req.get_type()

        # pre-process request
        meth_name = protocol + "_request"
        for processor in self.process_request.get(protocol, []):
            meth = getattr(processor, meth_name)
            req = meth(req)

        response = self._open(req, data)

        # post-process response
        meth_name = protocol + "_response"
        for processor in self.process_response.get(protocol, []):
            meth = getattr(processor, meth_name)
            response = meth(req, response)

        return response

    def _open(self, req, data=None):
        # default handlers first, then protocol-specific, then unknown.
        result = self._call_chain(self.handle_open, 'default',
                                  'default_open', req)
        if result:
            return result

        protocol = req.get_type()
        result = self._call_chain(self.handle_open, protocol, protocol +
                                  '_open', req)
        if result:
            return result

        return self._call_chain(self.handle_open, 'unknown',
                                'unknown_open', req)

    def error(self, proto, *args):
        if proto in ('http', 'https'):
            # XXX http[s] protocols are special-cased
            dict = self.handle_error['http'] # https is not different than http
            proto = args[2]  # YUCK!
            meth_name = 'http_error_%s' % proto
            http_err = 1
            orig_args = args
        else:
            dict = self.handle_error
            meth_name = proto + '_error'
            http_err = 0
        args = (dict, proto, meth_name) + args
        result = self._call_chain(*args)
        if result:
            return result

        if http_err:
            args = (dict, 'default', 'http_error_default') + orig_args
            return self._call_chain(*args)
435 # XXX probably also want an abstract factory that knows when it makes
436 # sense to skip a superclass in favor of a subclass and when it might
437 # make sense to include both
def build_opener(*handlers):
    """Create an opener object from a list of handlers.

    The opener will use several default handlers, including support
    for HTTP, FTP and when applicable, HTTPS.

    If any of the handlers passed as arguments are subclasses of the
    default handlers, the default handlers will not be used.
    """
    import types
    def isclass(obj):
        # accept both classic classes and new-style types
        return isinstance(obj, (types.ClassType, type))

    opener = OpenerDirector()
    default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
                       HTTPDefaultErrorHandler, HTTPRedirectHandler,
                       FTPHandler, FileHandler, HTTPErrorProcessor]
    if hasattr(httplib, 'HTTPS'):
        default_classes.append(HTTPSHandler)
    # A user-supplied handler (class or instance) displaces the default
    # it subclasses / is an instance of.
    skip = set()
    for klass in default_classes:
        for check in handlers:
            if isclass(check):
                if issubclass(check, klass):
                    skip.add(klass)
            elif isinstance(check, klass):
                skip.add(klass)
    for klass in skip:
        default_classes.remove(klass)

    for klass in default_classes:
        opener.add_handler(klass())

    for h in handlers:
        if isclass(h):
            h = h()
        opener.add_handler(h)
    return opener
class BaseHandler:
    """Base class for all handlers; defines ordering and parent linkage."""
    handler_order = 500

    def add_parent(self, parent):
        # parent is the OpenerDirector that owns this handler.
        self.parent = parent

    def close(self):
        # Only exists for backwards compatibility
        pass

    def __lt__(self, other):
        if not hasattr(other, "handler_order"):
            # Try to preserve the old behavior of having custom classes
            # inserted after default ones (works only for custom user
            # classes which are not aware of handler_order).
            return True

        return self.handler_order < other.handler_order
class HTTPErrorProcessor(BaseHandler):
    """Process HTTP error responses."""
    handler_order = 1000  # after all other processing

    def http_response(self, request, response):
        code, msg, hdrs = response.code, response.msg, response.info()

        # According to RFC 2616, "2xx" code indicates that the client's
        # request was successfully received, understood, and accepted.
        if not (200 <= code < 300):
            response = self.parent.error(
                'http', request, response, code, msg, hdrs)

        return response

    https_response = http_response
):
515 def http_error_default(self
, req
, fp
, code
, msg
, hdrs
):
516 raise HTTPError(req
.get_full_url(), code
, msg
, hdrs
, fp
)
518 class HTTPRedirectHandler(BaseHandler
):
519 # maximum number of redirections to any single URL
520 # this is needed because of the state that cookies introduce
522 # maximum total number of redirections (regardless of URL) before
523 # assuming we're in a loop
524 max_redirections
= 10
526 def redirect_request(self
, req
, fp
, code
, msg
, headers
, newurl
):
527 """Return a Request or None in response to a redirect.
529 This is called by the http_error_30x methods when a
530 redirection response is received. If a redirection should
531 take place, return a new Request to allow http_error_30x to
532 perform the redirect. Otherwise, raise HTTPError if no-one
533 else should try to handle this url. Return None if you can't
534 but another Handler might.
537 if (code
in (301, 302, 303, 307) and m
in ("GET", "HEAD")
538 or code
in (301, 302, 303) and m
== "POST"):
539 # Strictly (according to RFC 2616), 301 or 302 in response
540 # to a POST MUST NOT cause a redirection without confirmation
541 # from the user (of urllib2, in this case). In practice,
542 # essentially all clients do redirect in this case, so we
544 # be conciliant with URIs containing a space
545 newurl
= newurl
.replace(' ', '%20')
546 newheaders
= dict((k
,v
) for k
,v
in req
.headers
.items()
547 if k
.lower() not in ("content-length", "content-type")
549 return Request(newurl
,
551 origin_req_host
=req
.get_origin_req_host(),
554 raise HTTPError(req
.get_full_url(), code
, msg
, headers
, fp
)
556 # Implementation note: To avoid the server sending us into an
557 # infinite loop, the request object needs to track what URLs we
558 # have already seen. Do this by adding a handler-specific
559 # attribute to the Request object.
560 def http_error_302(self
, req
, fp
, code
, msg
, headers
):
561 # Some servers (incorrectly) return multiple Location headers
562 # (so probably same goes for URI). Use first header.
563 if 'location' in headers
:
564 newurl
= headers
.getheaders('location')[0]
565 elif 'uri' in headers
:
566 newurl
= headers
.getheaders('uri')[0]
570 # fix a possible malformed URL
571 urlparts
= urlparse
.urlparse(newurl
)
572 if not urlparts
.path
:
573 urlparts
= list(urlparts
)
575 newurl
= urlparse
.urlunparse(urlparts
)
577 newurl
= urlparse
.urljoin(req
.get_full_url(), newurl
)
579 # XXX Probably want to forget about the state of the current
580 # request, although that might interact poorly with other
581 # handlers that also use handler-specific request attributes
582 new
= self
.redirect_request(req
, fp
, code
, msg
, headers
, newurl
)
587 # .redirect_dict has a key url if url was previously visited.
588 if hasattr(req
, 'redirect_dict'):
589 visited
= new
.redirect_dict
= req
.redirect_dict
590 if (visited
.get(newurl
, 0) >= self
.max_repeats
or
591 len(visited
) >= self
.max_redirections
):
592 raise HTTPError(req
.get_full_url(), code
,
593 self
.inf_msg
+ msg
, headers
, fp
)
595 visited
= new
.redirect_dict
= req
.redirect_dict
= {}
596 visited
[newurl
] = visited
.get(newurl
, 0) + 1
598 # Don't close the fp until we are sure that we won't use it
603 return self
.parent
.open(new
, timeout
=req
.timeout
)
605 http_error_301
= http_error_303
= http_error_307
= http_error_302
607 inf_msg
= "The HTTP server returned a redirect error that would " \
608 "lead to an infinite loop.\n" \
609 "The last 30x error message was:\n"
def _parse_proxy(proxy):
    """Return (scheme, user, password, host/port) given a URL or an authority.

    If a URL is supplied, it must have an authority (host:port) component.
    According to RFC 3986, having an authority component means the URL must
    have two slashes after the scheme:

    >>> _parse_proxy('file:/ftp.example.com/')
    Traceback (most recent call last):
    ValueError: proxy URL with no authority: 'file:/ftp.example.com/'

    The first three items of the returned tuple may be None.

    Examples of authority parsing:

    >>> _parse_proxy('proxy.example.com')
    (None, None, None, 'proxy.example.com')
    >>> _parse_proxy('proxy.example.com:3128')
    (None, None, None, 'proxy.example.com:3128')

    The authority component may optionally include userinfo (assumed to be
    username:password):

    >>> _parse_proxy('joe:password@proxy.example.com')
    (None, 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('joe:password@proxy.example.com:3128')
    (None, 'joe', 'password', 'proxy.example.com:3128')

    Same examples, but with URLs instead:

    >>> _parse_proxy('http://proxy.example.com/')
    ('http', None, None, 'proxy.example.com')
    >>> _parse_proxy('http://proxy.example.com:3128/')
    ('http', None, None, 'proxy.example.com:3128')
    >>> _parse_proxy('http://joe:password@proxy.example.com/')
    ('http', 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('http://joe:password@proxy.example.com:3128')
    ('http', 'joe', 'password', 'proxy.example.com:3128')

    Everything after the authority is ignored:

    >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128')
    ('ftp', 'joe', 'password', 'proxy.example.com')

    Test for no trailing '/' case:

    >>> _parse_proxy('http://joe:password@proxy.example.com')
    ('http', 'joe', 'password', 'proxy.example.com')

    """
    scheme, r_scheme = splittype(proxy)
    if not r_scheme.startswith("/"):
        # authority
        scheme = None
        authority = proxy
    else:
        # URL
        if not r_scheme.startswith("//"):
            raise ValueError("proxy URL with no authority: %r" % proxy)
        # We have an authority, so for RFC 3986-compliant URLs (by ss 3.2.
        # and 3.3.), path is empty or starts with '/'
        end = r_scheme.find("/", 2)
        if end == -1:
            end = None
        authority = r_scheme[2:end]
    userinfo, hostport = splituser(authority)
    if userinfo is not None:
        user, password = splitpasswd(userinfo)
    else:
        user = password = None
    return scheme, user, password, hostport
class ProxyHandler(BaseHandler):
    # Proxies must be in front
    handler_order = 100

    def __init__(self, proxies=None):
        # proxies maps scheme -> proxy URL, e.g. {'http': 'http://host:port'}
        if proxies is None:
            proxies = getproxies()
        assert hasattr(proxies, 'has_key'), "proxies must be a mapping"
        self.proxies = proxies
        # Install a <scheme>_open method per configured proxy; the lambda
        # defaults bind the current url/type for each iteration.
        for type, url in proxies.items():
            setattr(self, '%s_open' % type,
                    lambda r, proxy=url, type=type, meth=self.proxy_open: \
                    meth(r, proxy, type))

    def proxy_open(self, req, proxy, type):
        orig_type = req.get_type()
        proxy_type, user, password, hostport = _parse_proxy(proxy)
        if proxy_type is None:
            proxy_type = orig_type
        if user and password:
            user_pass = '%s:%s' % (unquote(user), unquote(password))
            creds = base64.b64encode(user_pass).strip()
            req.add_header('Proxy-authorization', 'Basic ' + creds)
        hostport = unquote(hostport)
        req.set_proxy(hostport, proxy_type)
        if orig_type == proxy_type or orig_type == 'https':
            # let other handlers take care of it
            return None
        else:
            # need to start over, because the other handlers don't
            # grok the proxy's URL type
            # e.g. if we have a constructor arg proxies like so:
            # {'http': 'ftp://proxy.example.com'}, we may end up turning
            # a request for http://acme.example.com/a into one for
            # ftp://proxy.example.com/a
            return self.parent.open(req, timeout=req.timeout)
class HTTPPasswordMgr:
    """Keep a database of (realm, uri) -> (user, password) mappings."""

    def __init__(self):
        self.passwd = {}

    def add_password(self, realm, uri, user, passwd):
        # uri could be a single URI or a sequence
        if isinstance(uri, basestring):
            uri = [uri]
        if not realm in self.passwd:
            self.passwd[realm] = {}
        # Store both with and without default port so lookups succeed
        # whether or not the authuri spells the port out.
        for default_port in True, False:
            reduced_uri = tuple(
                [self.reduce_uri(u, default_port) for u in uri])
            self.passwd[realm][reduced_uri] = (user, passwd)

    def find_user_password(self, realm, authuri):
        domains = self.passwd.get(realm, {})
        for default_port in True, False:
            reduced_authuri = self.reduce_uri(authuri, default_port)
            for uris, authinfo in domains.iteritems():
                for uri in uris:
                    if self.is_suburi(uri, reduced_authuri):
                        return authinfo
        return None, None

    def reduce_uri(self, uri, default_port=True):
        """Accept authority or URI and extract only the authority and path."""
        # note HTTP URLs do not have a userinfo component
        parts = urlparse.urlsplit(uri)
        if parts[1]:
            # URI
            scheme = parts[0]
            authority = parts[1]
            path = parts[2] or '/'
        else:
            # host or host:port
            scheme = None
            authority = uri
            path = '/'
        host, port = splitport(authority)
        if default_port and port is None and scheme is not None:
            dport = {"http": 80,
                     "https": 443,
                     }.get(scheme)
            if dport is not None:
                authority = "%s:%d" % (host, dport)
        return authority, path

    def is_suburi(self, base, test):
        """Check if test is below base in a URI tree

        Both args must be URIs in reduced form.
        """
        if base == test:
            return True
        if base[0] != test[0]:
            return False
        common = posixpath.commonprefix((base[1], test[1]))
        if len(common) == len(base[1]):
            return True
        return False
class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):
    """Password manager that falls back to the None (default) realm."""

    def find_user_password(self, realm, authuri):
        user, password = HTTPPasswordMgr.find_user_password(self, realm,
                                                            authuri)
        if user is not None:
            return user, password
        # No match for the specific realm; try the default realm.
        return HTTPPasswordMgr.find_user_password(self, None, authuri)
class AbstractBasicAuthHandler:

    # XXX this allows for multiple auth-schemes, but will stupidly pick
    # the last one with a realm specified.

    # allow for double- and single-quoted realm values
    # (single quotes are a violation of the RFC, but appear in the wild)
    rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
                    'realm=(["\'])(.*?)\\2', re.I)

    # XXX could pre-emptively send auth info already accepted (RFC 2617,
    # end of section 2, and section 1.2 immediately after "credentials"
    # grammar)

    def __init__(self, password_mgr=None):
        if password_mgr is None:
            password_mgr = HTTPPasswordMgr()
        self.passwd = password_mgr
        self.add_password = self.passwd.add_password

    def http_error_auth_reqed(self, authreq, host, req, headers):
        # host may be an authority (without userinfo) or a URL with an
        # authority
        # XXX could be multiple headers
        authreq = headers.get(authreq, None)
        if authreq:
            mo = AbstractBasicAuthHandler.rx.search(authreq)
            if mo:
                scheme, quote, realm = mo.groups()
                if scheme.lower() == 'basic':
                    return self.retry_http_basic_auth(host, req, realm)

    def retry_http_basic_auth(self, host, req, realm):
        user, pw = self.passwd.find_user_password(realm, host)
        if pw is not None:
            raw = "%s:%s" % (user, pw)
            auth = 'Basic %s' % base64.b64encode(raw).strip()
            # Avoid an infinite retry loop: give up if the exact same
            # credentials were already sent on this request.
            if req.headers.get(self.auth_header, None) == auth:
                return None
            req.add_header(self.auth_header, auth)
            return self.parent.open(req, timeout=req.timeout)
        else:
            return None
class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Basic auth against the origin server (401 responses)."""

    auth_header = 'Authorization'

    def http_error_401(self, req, fp, code, msg, headers):
        url = req.get_full_url()
        return self.http_error_auth_reqed('www-authenticate',
                                          url, req, headers)
class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Basic auth against an intermediate proxy (407 responses)."""

    auth_header = 'Proxy-authorization'

    def http_error_407(self, req, fp, code, msg, headers):
        # http_error_auth_reqed requires that there is no userinfo component in
        # authority.  Assume there isn't one, since urllib2 does not (and
        # should not, RFC 3986 s. 3.2.1) support requests for URLs containing
        # userinfo.
        authority = req.get_host()
        return self.http_error_auth_reqed('proxy-authenticate',
                                          authority, req, headers)
865 """Return n random bytes."""
866 # Use /dev/urandom if it is available. Fall back to random module
867 # if not. It might be worthwhile to extend this function to use
868 # other platform-specific mechanisms for getting random bytes.
869 if os
.path
.exists("/dev/urandom"):
870 f
= open("/dev/urandom")
875 L
= [chr(random
.randrange(0, 256)) for i
in range(n
)]
class AbstractDigestAuthHandler:
    # Digest authentication is specified in RFC 2617.

    # XXX The client does not inspect the Authentication-Info header
    # in a successful response.

    # XXX It should be possible to test this implementation against
    # a mock server that just generates a static set of challenges.

    # XXX qop="auth-int" supports is shaky

    def __init__(self, passwd=None):
        if passwd is None:
            passwd = HTTPPasswordMgr()
        self.passwd = passwd
        self.add_password = self.passwd.add_password
        self.retried = 0
        self.nonce_count = 0

    def reset_retry_count(self):
        self.retried = 0

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        authreq = headers.get(auth_header, None)
        if self.retried > 5:
            # Don't fail endlessly - if we failed once, we'll probably
            # fail a second time. Hm. Unless the Password Manager is
            # prompting for the information. Crap. This isn't great
            # but it's better than the current 'repeat until recursion
            # depth exceeded' approach <wink>
            raise HTTPError(req.get_full_url(), 401, "digest auth failed",
                            headers, None)
        else:
            self.retried += 1
        if authreq:
            scheme = authreq.split()[0]
            if scheme.lower() == 'digest':
                return self.retry_http_digest_auth(req, authreq)

    def retry_http_digest_auth(self, req, auth):
        token, challenge = auth.split(' ', 1)
        chal = parse_keqv_list(parse_http_list(challenge))
        auth = self.get_authorization(req, chal)
        if auth:
            auth_val = 'Digest %s' % auth
            # Same credentials already sent: stop to avoid a retry loop.
            if req.headers.get(self.auth_header, None) == auth_val:
                return None
            req.add_unredirected_header(self.auth_header, auth_val)
            resp = self.parent.open(req, timeout=req.timeout)
            return resp

    def get_cnonce(self, nonce):
        # The cnonce-value is an opaque
        # quoted string value provided by the client and used by both client
        # and server to avoid chosen plaintext attacks, to provide mutual
        # authentication, and to provide some message integrity protection.
        # This isn't a fabulous effort, but it's probably Good Enough.
        dig = hashlib.sha1("%s:%s:%s:%s" % (self.nonce_count, nonce, time.ctime(),
                                            randombytes(8))).hexdigest()
        return dig[:16]

    def get_authorization(self, req, chal):
        try:
            realm = chal['realm']
            nonce = chal['nonce']
            qop = chal.get('qop')
            algorithm = chal.get('algorithm', 'MD5')
            # mod_digest doesn't send an opaque, even though it isn't
            # supposed to be optional
            opaque = chal.get('opaque', None)
        except KeyError:
            return None

        H, KD = self.get_algorithm_impls(algorithm)
        if H is None:
            return None

        user, pw = self.passwd.find_user_password(realm, req.get_full_url())
        if user is None:
            return None

        # XXX not implemented yet
        if req.has_data():
            entdig = self.get_entity_digest(req.get_data(), chal)
        else:
            entdig = None

        A1 = "%s:%s:%s" % (user, realm, pw)
        A2 = "%s:%s" % (req.get_method(),
                        # XXX selector: what about proxies and full urls
                        req.get_selector())
        if qop == 'auth':
            self.nonce_count += 1
            ncvalue = '%08x' % self.nonce_count
            cnonce = self.get_cnonce(nonce)
            noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
            respdig = KD(H(A1), noncebit)
        elif qop is None:
            respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
        else:
            # XXX handle auth-int.
            raise URLError("qop '%s' is not supported." % qop)

        # XXX should the partial digests be encoded too?

        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (user, realm, nonce, req.get_selector(),
                                  respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if entdig:
            base += ', digest="%s"' % entdig
        base += ', algorithm="%s"' % algorithm
        if qop:
            base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
        return base

    def get_algorithm_impls(self, algorithm):
        # algorithm should be case-insensitive according to RFC2617
        algorithm = algorithm.upper()
        # lambdas assume digest modules are imported at the top level
        if algorithm == 'MD5':
            H = lambda x: hashlib.md5(x).hexdigest()
        elif algorithm == 'SHA':
            H = lambda x: hashlib.sha1(x).hexdigest()
        # XXX MD5-sess
        KD = lambda s, d: H("%s:%s" % (s, d))
        return H, KD

    def get_entity_digest(self, data, chal):
        # XXX not implemented yet
        return None
class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """An authentication protocol defined by RFC 2069

    Digest authentication improves on basic authentication because it
    does not transmit passwords in the clear.
    """

    auth_header = 'Authorization'
    handler_order = 490  # before Basic auth

    def http_error_401(self, req, fp, code, msg, headers):
        host = urlparse.urlparse(req.get_full_url())[1]
        retry = self.http_error_auth_reqed('www-authenticate',
                                           host, req, headers)
        self.reset_retry_count()
        return retry
class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """Digest auth against an intermediate proxy (407 responses)."""

    auth_header = 'Proxy-Authorization'
    handler_order = 490  # before Basic auth

    def http_error_407(self, req, fp, code, msg, headers):
        host = req.get_host()
        retry = self.http_error_auth_reqed('proxy-authenticate',
                                           host, req, headers)
        self.reset_retry_count()
        return retry
class AbstractHTTPHandler(BaseHandler):

    def __init__(self, debuglevel=0):
        self._debuglevel = debuglevel

    def set_http_debuglevel(self, level):
        self._debuglevel = level

    def do_request_(self, request):
        """Fill in default headers (Content-*, Host, User-agent) on request."""
        host = request.get_host()
        if not host:
            raise URLError('no host given')

        if request.has_data():  # POST
            data = request.get_data()
            if not request.has_header('Content-type'):
                request.add_unredirected_header(
                    'Content-type',
                    'application/x-www-form-urlencoded')
            if not request.has_header('Content-length'):
                request.add_unredirected_header(
                    'Content-length', '%d' % len(data))

        sel_host = host
        if request.has_proxy():
            # For proxied requests the Host header must name the target,
            # which is embedded in the selector (a full URL).
            scheme, sel = splittype(request.get_selector())
            sel_host, sel_path = splithost(sel)

        if not request.has_header('Host'):
            request.add_unredirected_header('Host', sel_host)
        for name, value in self.parent.addheaders:
            name = name.capitalize()
            if not request.has_header(name):
                request.add_unredirected_header(name, value)

        return request

    def do_open(self, http_class, req):
        """Return an addinfourl object for the request, using http_class.

        http_class must implement the HTTPConnection API from httplib.
        The addinfourl return value is a file-like object.  It also
        has methods and attributes including:
            - info(): return a mimetools.Message object for the headers
            - geturl(): return the original request URL
            - code: HTTP status code
        """
        host = req.get_host()
        if not host:
            raise URLError('no host given')

        h = http_class(host, timeout=req.timeout)  # will parse host:port
        h.set_debuglevel(self._debuglevel)

        headers = dict(req.headers)
        headers.update(req.unredirected_hdrs)
        # We want to make an HTTP/1.1 request, but the addinfourl
        # class isn't prepared to deal with a persistent connection.
        # It will try to read all remaining data from the socket,
        # which will block while the server waits for the next request.
        # So make sure the connection gets closed after the (only)
        # request.
        headers["Connection"] = "close"
        headers = dict(
            (name.title(), val) for name, val in headers.items())

        if req._tunnel_host:
            h.set_tunnel(req._tunnel_host)

        try:
            h.request(req.get_method(), req.get_selector(), req.data, headers)
            try:
                r = h.getresponse(buffering=True)
            except TypeError: #buffering kw not supported
                r = h.getresponse()
        except socket.error as err: # XXX what error?
            raise URLError(err)

        # Pick apart the HTTPResponse object to get the addinfourl
        # object initialized properly.

        # Wrap the HTTPResponse object in socket's file object adapter
        # for Windows.  That adapter calls recv(), so delegate recv()
        # to read().  This weird wrapping allows the returned object to
        # have readline() and readlines() methods.

        # XXX It might be better to extract the read buffering code
        # out of socket._fileobject() and into a base class.

        r.recv = r.read
        fp = socket._fileobject(r, close=True)

        resp = addinfourl(fp, r.msg, req.get_full_url())
        resp.code = r.status
        resp.msg = r.reason
        return resp
class HTTPHandler(AbstractHTTPHandler):
    """Handler for plain "http:" URLs.

    All of the heavy lifting (header fix-ups, connection handling and
    response wrapping) lives in AbstractHTTPHandler; this subclass only
    chooses the connection class to use.
    """

    def http_open(self, req):
        # Hand the request to the shared machinery together with the
        # plain-HTTP connection factory.
        connection_factory = httplib.HTTPConnection
        return self.do_open(connection_factory, req)

    # Reuse the shared request pre-processing (Content-length, Host,
    # proxy selector handling, ...) unchanged for http requests.
    http_request = AbstractHTTPHandler.do_request_
# HTTPS support exists only if httplib was built with SSL support.
if hasattr(httplib, 'HTTPS'):
    class HTTPSHandler(AbstractHTTPHandler):

        def https_open(self, req):
            # Same machinery as HTTPHandler, but with the SSL-wrapped
            # connection class.
            return self.do_open(httplib.HTTPSConnection, req)

        # Share the header fix-up logic with the plain-http handler.
        https_request = AbstractHTTPHandler.do_request_
class HTTPCookieProcessor(BaseHandler):
    # Attaches Cookie headers to outgoing requests and harvests
    # Set-Cookie headers from responses into a cookielib.CookieJar.
    def __init__(self, cookiejar=None):
        import cookielib
        if cookiejar is None:
            # A fresh, private jar when the caller did not supply one.
            cookiejar = cookielib.CookieJar()
        self.cookiejar = cookiejar

    def http_request(self, request):
        # Add any cookies in the jar that match this request.
        self.cookiejar.add_cookie_header(request)
        return request

    def http_response(self, request, response):
        # Store cookies the server set before the response is handed
        # back to the caller.
        self.cookiejar.extract_cookies(response, request)
        return response

    # Cookie handling is identical over https.
    https_request = http_request
    https_response = http_response
class UnknownHandler(BaseHandler):
    """Fallback handler: any scheme no other handler claimed is an error."""

    def unknown_open(self, req):
        """Always fail, reporting the unsupported URL scheme.

        Raises:
            URLError: naming the scheme of *req*.
        """
        # Renamed local from 'type' to 'scheme' to avoid shadowing the
        # builtin; the error message is unchanged.
        scheme = req.get_type()
        raise URLError('unknown url type: %s' % scheme)
def parse_keqv_list(l):
    """Parse list of key=value strings where keys are not duplicated.

    Each element of *l* must contain at least one '='; the value is
    everything after the first '='.  A value wrapped in double quotes
    has the quotes stripped.  Returns a dict mapping keys to values.
    """
    parsed = {}
    for elt in l:
        k, v = elt.split('=', 1)
        # Strip one level of surrounding double quotes.  The emptiness
        # check fixes an IndexError the original raised on a bare
        # 'key=' element (empty value); all other inputs behave as before.
        if v and v[0] == '"' and v[-1] == '"':
            v = v[1:-1]
        parsed[k] = v
    return parsed
def parse_http_list(s):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  A non-quoted string could have quotes in the
    middle.  Neither commas nor quotes count if they are escaped.

    Only double-quotes count, not single-quotes.
    """
    parts = []
    current = ''
    in_escape = in_quote = False

    for ch in s:
        if in_escape:
            # Previous char was a backslash inside a quoted string:
            # take this character literally.
            current += ch
            in_escape = False
        elif in_quote:
            if ch == '\\':
                # The backslash itself is dropped; the next character
                # is appended verbatim.
                in_escape = True
            else:
                if ch == '"':
                    in_quote = False
                current += ch
        elif ch == ',':
            # Unquoted comma: element boundary.
            parts.append(current)
            current = ''
        else:
            if ch == '"':
                in_quote = True
            current += ch

    # Trailing element, if the input did not end with a comma.
    if current:
        parts.append(current)

    return [part.strip() for part in parts]
class FileHandler(BaseHandler):
    # Use local file or FTP depending on form of URL
    def file_open(self, req):
        url = req.get_selector()
        # "file://host/..." with a non-empty host part is really an FTP
        # request in disguise; re-dispatch it through the opener.
        if url[:2] == '//' and url[2:3] != '/':
            req.type = 'ftp'
            return self.parent.open(req)
        else:
            return self.open_local_file(req)

    # names for the localhost
    names = None

    def get_names(self):
        # Cache the addresses considered "local" on the class so the
        # (potentially slow) DNS lookups happen at most once per process.
        if FileHandler.names is None:
            try:
                FileHandler.names = (socket.gethostbyname('localhost'),
                                    socket.gethostbyname(socket.gethostname()))
            except socket.gaierror:
                # gethostname() may not resolve; fall back to localhost only.
                FileHandler.names = (socket.gethostbyname('localhost'),)
        return FileHandler.names

    # not entirely sure what the rules are here
    def open_local_file(self, req):
        import email.utils
        import mimetypes
        host = req.get_host()
        file = req.get_selector()
        localfile = url2pathname(file)
        try:
            stats = os.stat(localfile)
            size = stats.st_size
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            mtype = mimetypes.guess_type(file)[0]
            headers = mimetools.Message(StringIO(
                'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
                (mtype or 'text/plain', size, modified)))
            if host:
                host, port = splitport(host)
            # Serve the file only when the URL names no host, or the
            # host resolves to this machine and no explicit port was given.
            if not host or \
                (not port and socket.gethostbyname(host) in self.get_names()):
                return addinfourl(open(localfile, 'rb'),
                                  headers, 'file:'+file)
        except OSError, msg:
            # urllib2 users shouldn't expect OSErrors coming from urlopen()
            raise URLError(msg)
        raise URLError('file not on local host')
class FTPHandler(BaseHandler):
    def ftp_open(self, req):
        # Open an "ftp:" URL, returning an addinfourl wrapping the data
        # connection.  Any ftplib failure is re-raised as URLError.
        import ftplib
        import mimetypes
        host = req.get_host()
        if not host:
            raise URLError('ftp error: no host given')
        host, port = splitport(host)
        if port is None:
            port = ftplib.FTP_PORT
        else:
            port = int(port)

        # username/password handling
        user, host = splituser(host)
        if user:
            user, passwd = splitpasswd(user)
        else:
            passwd = None
        host = unquote(host)
        user = unquote(user or '')
        passwd = unquote(passwd or '')

        try:
            host = socket.gethostbyname(host)
        except socket.error, msg:
            raise URLError(msg)
        path, attrs = splitattr(req.get_selector())
        dirs = path.split('/')
        dirs = map(unquote, dirs)
        # Last path component is the file; the rest are directories.
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]:
            # Drop the empty leading component from an absolute path.
            dirs = dirs[1:]
        try:
            fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout)
            # Default transfer type: binary ('I') for files, directory
            # listing ('D') when no file component was given.
            type = file and 'I' or 'D'
            for attr in attrs:
                attr, value = splitvalue(attr)
                # A ";type=a|i|d" URL attribute overrides the default.
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    type = value.upper()
            fp, retrlen = fw.retrfile(file, type)
            headers = ""
            mtype = mimetypes.guess_type(req.get_full_url())[0]
            if mtype:
                headers += "Content-type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-length: %d\n" % retrlen
            sf = StringIO(headers)
            headers = mimetools.Message(sf)
            return addinfourl(fp, headers, req.get_full_url())
        except ftplib.all_errors, msg:
            # Preserve the original traceback while converting to URLError.
            raise URLError, ('ftp error: %s' % msg), sys.exc_info()[2]

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        # Hook point: CacheFTPHandler overrides this to reuse connections.
        fw = ftpwrapper(user, passwd, host, port, dirs, timeout)
## fw.ftp.set_debuglevel(1)
        return fw
class CacheFTPHandler(FTPHandler):
    # XXX would be nice to have pluggable cache strategies
    # XXX this stuff is definitely not thread safe
    # FTP handler that keeps connections alive and reuses them for a
    # while (self.delay seconds), keyed by (user, host, port, path, timeout).
    def __init__(self):
        self.cache = {}      # key -> live ftpwrapper connection
        self.timeout = {}    # key -> absolute expiry time
        self.soonest = 0     # earliest expiry among cached connections
        self.delay = 60      # seconds a connection stays cached
        self.max_conns = 16  # cache size bound

    def setTimeout(self, t):
        self.delay = t

    def setMaxConns(self, m):
        self.max_conns = m

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        key = user, host, port, '/'.join(dirs), timeout
        if key in self.cache:
            # Cache hit: just refresh the expiry.
            self.timeout[key] = time.time() + self.delay
        else:
            self.cache[key] = ftpwrapper(user, passwd, host, port,
                                         dirs, timeout)
            self.timeout[key] = time.time() + self.delay
        self.check_cache()
        return self.cache[key]

    def check_cache(self):
        # first check for old ones
        t = time.time()
        if self.soonest <= t:
            # At least one entry may have expired; close and drop all
            # entries whose expiry has passed.
            for k, v in self.timeout.items():
                if v < t:
                    self.cache[k].close()
                    del self.cache[k]
                    del self.timeout[k]
        self.soonest = min(self.timeout.values())

        # then check the size
        if len(self.cache) == self.max_conns:
            # Evict the single entry expiring soonest to make room.
            for k, v in self.timeout.items():
                if v == self.soonest:
                    del self.cache[k]
                    del self.timeout[k]
                    break
            self.soonest = min(self.timeout.values())