1 """An extensible library for opening URLs using a variety of protocols
3 The simplest way to use this module is to call the urlopen function,
4 which accepts a string containing a URL or a Request object (described
5 below). It opens the URL and returns the result as a file-like
6 object; the returned object has some extra methods described below.
8 The OpenerDirector manages a collection of Handler objects that do
9 all the actual work. Each Handler implements a particular protocol or
10 option. The OpenerDirector is a composite object that invokes the
11 Handlers needed to open the requested URL. For example, the
12 HTTPHandler performs HTTP GET and POST requests and deals with
13 non-error returns. The HTTPRedirectHandler automatically deals with
14 HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler
15 deals with digest authentication.
17 urlopen(url, data=None) -- Basic usage is the same as original
18 urllib. pass the url and optionally data to post to an HTTP URL, and
19 get a file-like object back. One difference is that you can also pass
20 a Request instance instead of URL. Raises a URLError (subclass of
21 IOError); for HTTP errors, raises an HTTPError, which can also be
22 treated as a valid response.
24 build_opener -- Function that creates a new OpenerDirector instance.
25 Will install the default handlers. Accepts one or more Handlers as
26 arguments, either instances or Handler classes that it will
27 instantiate. If one of the arguments is a subclass of the default
28 handler, the argument will be installed instead of the default.
30 install_opener -- Installs a new opener as the default opener.
34 OpenerDirector -- Sets up the User Agent as the Python-urllib client and manages
35 the Handler classes, while dealing with requests and responses.
37 Request -- An object that encapsulates the state of a request. The
38 state can be as simple as the URL. It can also include extra HTTP
39 headers, e.g. a User-Agent.
44 URLError -- A subclass of IOError, individual protocols have their own
47 HTTPError -- Also a valid HTTP response, so you can treat an HTTP error
48 as an exceptional event or valid response.
51 BaseHandler and parent
52 _call_chain conventions
58 # set up authentication info
59 authinfo = urllib2.HTTPBasicAuthHandler()
60 authinfo.add_password(realm='PDQ Application',
61 uri='https://mahler:8092/site-updates.py',
63 passwd='geheim$parole')
65 proxy_support = urllib2.ProxyHandler({"http" : "http://ahad-haam:3128"})
67 # build a new opener that adds authentication and caching FTP handlers
68 opener = urllib2.build_opener(proxy_support, authinfo, urllib2.CacheFTPHandler)
71 urllib2.install_opener(opener)
73 f = urllib2.urlopen('http://www.python.org/')
79 # If an authentication error handler that tries to perform
80 # authentication for some reason but fails, how should the error be
81 # signalled? The client needs to know the HTTP error code. But if
82 # the handler knows what the problem was, e.g., that it didn't know
83 # the hash algorithm requested in the challenge, it would be good to
84 # pass that information along to the client, too.
85 # ftp errors aren't handled cleanly
86 # check digest against correct (i.e. non-apache) implementation
88 # Possible extensions:
89 # complex proxies XXX not sure what exactly was meant by this
90 # abstract factory for opener
107 from cStringIO
import StringIO
109 from StringIO
import StringIO
111 from urllib
import (unwrap
, unquote
, splittype
, splithost
, quote
,
112 addinfourl
, splitport
,
113 splitattr
, ftpwrapper
, splituser
, splitpasswd
, splitvalue
)
115 # support for FileHandler, proxies via environment variables
116 from urllib
import localhost
, url2pathname
, getproxies
, proxy_bypass
118 # used in User-Agent header sent
119 __version__
= sys
.version
[:3]
_opener = None
def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
    """Open a URL (string or Request object) and return a file-like response.

    *data* is an optional payload to POST; *timeout* is a socket timeout
    in seconds.  Uses the globally installed opener (see install_opener),
    building and caching a default one on first use — rebuilding the
    opener on every call would both be wasteful and silently ignore any
    opener installed via install_opener().
    """
    global _opener
    if _opener is None:
        _opener = build_opener()
    return _opener.open(url, data, timeout)
def install_opener(opener):
    """Install *opener* as the global default opener used by urlopen()."""
    global _opener
    _opener = opener
132 # do these error classes make sense?
133 # make sure all of the IOError stuff is overridden. we just want to be
class URLError(IOError):
    """Error raised when a handler cannot process a URL.

    URLError is a sub-type of IOError, but it doesn't share any of
    the implementation.  need to override __init__ and __str__.
    It sets self.args for compatibility with other EnvironmentError
    subclasses, but args doesn't have the typical format with errno in
    slot 0 and strerror in slot 1.  This may be better than nothing.
    """
    def __init__(self, reason):
        # *reason* may be a descriptive string or a wrapped exception.
        self.args = reason,
        self.reason = reason

    def __str__(self):
        return '<urlopen error %s>' % self.reason
class HTTPError(URLError, addinfourl):
    """Raised when HTTP error occurs, but also acts like non-error return"""
    __super_init = addinfourl.__init__

    def __init__(self, url, code, msg, hdrs, fp):
        self.code = code
        self.msg = msg
        self.hdrs = hdrs
        self.fp = fp
        self.filename = url
        # The addinfourl classes depend on fp being a valid file
        # object.  In some cases, the HTTPError may not have a valid
        # file object.  If this happens, the simplest workaround is to
        # not initialize the base classes.
        if fp is not None:
            self.__super_init(fp, hdrs, url, code)

    def __str__(self):
        return 'HTTP Error %s: %s' % (self.code, self.msg)
169 # copied from cookielib.py
# copied from cookielib.py
_cut_port_re = re.compile(r":\d+$")

def request_host(request):
    """Return request-host, as defined by RFC 2965.

    Variation from RFC: returned value is lowercased, for convenient
    comparison.

    """
    url = request.get_full_url()
    host = urlparse.urlparse(url)[1]
    if host == "":
        # relative URL: fall back to any explicit Host header
        host = request.get_header("Host", "")

    # remove port, if present
    host = _cut_port_re.sub("", host, 1)
    return host.lower()
def __init__(self, url, data=None, headers={},
             origin_req_host=None, unverifiable=False):
    """Initialize a Request.

    *url* is the URL to open; *data* an optional POST payload;
    *headers* a mapping of initial HTTP headers.  *origin_req_host*
    and *unverifiable* support RFC 2965 cookie processing.
    NOTE: *headers* uses a mutable default; it is only read here, so
    this is safe, and the signature is kept for compatibility.
    """
    # unwrap('<URL:type://host/path>') --> 'type://host/path'
    self.__original = unwrap(url)
    self.type = None
    # self.__r_type is what's left after doing the splittype
    self.host = None
    self.port = None
    self._tunnel_host = None
    self.data = data
    self.headers = {}
    for key, value in headers.items():
        self.add_header(key, value)
    self.unredirected_hdrs = {}
    if origin_req_host is None:
        origin_req_host = request_host(self)
    self.origin_req_host = origin_req_host
    self.unverifiable = unverifiable
def __getattr__(self, attr):
    # XXX this is a fallback mechanism to guard against these
    # methods getting called in a non-standard order.  this may be
    # too complicated and/or unnecessary.
    # XXX should the __r_XXX attributes be public?
    if attr[:12] == '_Request__r_':
        name = attr[12:]
        if hasattr(Request, 'get_' + name):
            # calling the getter populates the cached attribute
            getattr(self, 'get_' + name)()
            return getattr(self, attr)
    raise AttributeError(attr)
def get_method(self):
    """Return the HTTP method: POST when a body is present, else GET."""
    if self.has_data():
        return "POST"
    else:
        return "GET"
226 # XXX these helper methods are lame
def add_data(self, data):
    """Set the request body (makes the request a POST)."""
    self.data = data

def has_data(self):
    """Return True if a request body has been set."""
    return self.data is not None
def get_full_url(self):
    """Return the original, unwrapped URL this request was built from."""
    return self.__original
def get_type(self):
    """Return the URL scheme, computing and caching it on first use."""
    if self.type is None:
        self.type, self.__r_type = splittype(self.__original)
        if self.type is None:
            raise ValueError("unknown url type: %s" % self.__original)
    return self.type
def get_host(self):
    """Return the host (possibly host:port), computed lazily from the URL."""
    if self.host is None:
        self.host, self.__r_host = splithost(self.__r_type)
        if self.host:
            self.host = unquote(self.host)
    return self.host
def get_selector(self):
    """Return the selector (the URL path sent to the server)."""
    return self.__r_host
def set_proxy(self, host, type):
    """Route this request through the proxy at *host* speaking *type*.

    For https requests the original host is remembered as the CONNECT
    tunnel endpoint instead of rewriting the request type/selector.
    """
    if self.type == 'https' and not self._tunnel_host:
        self._tunnel_host = self.host
    else:
        self.type = type
        # selector becomes the full original URL when proxying
        self.__r_host = self.__original

    self.host = host
def has_proxy(self):
    """Return True if set_proxy() has rewritten the selector."""
    return self.__r_host == self.__original
def get_origin_req_host(self):
    """Return the origin request-host (RFC 2965)."""
    return self.origin_req_host
def is_unverifiable(self):
    """Return the unverifiable flag (RFC 2965 cookie processing)."""
    return self.unverifiable
def add_header(self, key, val):
    """Set header *key* to *val*, normalizing the key's capitalization."""
    # useful for something like authentication
    self.headers[key.capitalize()] = val
def add_unredirected_header(self, key, val):
    """Set a header that will NOT be copied onto a redirected request."""
    # will not be added to a redirected request
    self.unredirected_hdrs[key.capitalize()] = val
def has_header(self, header_name):
    """Return True if *header_name* is set, redirected or not."""
    return (header_name in self.headers or
            header_name in self.unredirected_hdrs)
def get_header(self, header_name, default=None):
    """Return the value of *header_name*, preferring normal headers
    over unredirected ones; *default* if absent from both."""
    return self.headers.get(
        header_name,
        self.unredirected_hdrs.get(header_name, default))
def header_items(self):
    """Return all headers as (name, value) items; normal headers
    override unredirected ones on key collision."""
    hdrs = self.unredirected_hdrs.copy()
    hdrs.update(self.headers)
    return hdrs.items()
class OpenerDirector:
    """Manages a chain of Handler objects and dispatches requests to them.

    Sets up the User-Agent as the Python-urllib client and manages
    the Handler classes, while dealing with requests and responses.
    """
    def __init__(self):
        client_version = "Python-urllib/%s" % __version__
        self.addheaders = [('User-agent', client_version)]
        # manage the individual handlers
        self.handlers = []
        self.handle_open = {}
        self.handle_error = {}
        self.process_response = {}
        self.process_request = {}

    def add_handler(self, handler):
        """Register *handler*'s protocol_open/error/request/response
        methods, keyed by the method-name convention."""
        if not hasattr(handler, "add_parent"):
            raise TypeError("expected BaseHandler instance, got %r" %
                            type(handler))

        added = False
        for meth in dir(handler):
            if meth in ["redirect_request", "do_open", "proxy_open"]:
                # oops, coincidental match
                continue

            i = meth.find("_")
            protocol = meth[:i]
            condition = meth[i+1:]

            if condition.startswith("error"):
                j = condition.find("_") + i + 1
                kind = meth[j+1:]
                try:
                    kind = int(kind)
                except ValueError:
                    pass
                lookup = self.handle_error.get(protocol, {})
                self.handle_error[protocol] = lookup
            elif condition == "open":
                kind = protocol
                lookup = self.handle_open
            elif condition == "response":
                kind = protocol
                lookup = self.process_response
            elif condition == "request":
                kind = protocol
                lookup = self.process_request
            else:
                continue

            handlers = lookup.setdefault(kind, [])
            if handlers:
                bisect.insort(handlers, handler)
            else:
                handlers.append(handler)
            added = True

        if added:
            # the handlers must work in an specific order, the order
            # is specified in a Handler attribute
            bisect.insort(self.handlers, handler)
            handler.add_parent(self)

    def close(self):
        # Only exists for backwards compatibility.
        pass

    def _call_chain(self, chain, kind, meth_name, *args):
        # Handlers raise an exception if no one else should try to handle
        # the request, or return None if they can't but another handler
        # could.  Otherwise, they return the response.
        handlers = chain.get(kind, ())
        for handler in handlers:
            func = getattr(handler, meth_name)

            result = func(*args)
            if result is not None:
                return result

    def open(self, fullurl, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
        """Open *fullurl* (URL string or Request), returning a response."""
        # accept a URL or a Request object
        if isinstance(fullurl, basestring):
            req = Request(fullurl, data)
        else:
            req = fullurl
            if data is not None:
                req.add_data(data)

        req.timeout = timeout
        protocol = req.get_type()

        # pre-process request
        meth_name = protocol + "_request"
        for processor in self.process_request.get(protocol, []):
            meth = getattr(processor, meth_name)
            req = meth(req)

        response = self._open(req, data)

        # post-process response
        meth_name = protocol + "_response"
        for processor in self.process_response.get(protocol, []):
            meth = getattr(processor, meth_name)
            response = meth(req, response)

        return response

    def _open(self, req, data=None):
        # default handlers first, then protocol-specific, then unknown
        result = self._call_chain(self.handle_open, 'default',
                                  'default_open', req)
        if result:
            return result

        protocol = req.get_type()
        result = self._call_chain(self.handle_open, protocol, protocol +
                                  '_open', req)
        if result:
            return result

        return self._call_chain(self.handle_open, 'unknown',
                                'unknown_open', req)

    def error(self, proto, *args):
        """Dispatch an error to the registered protocol error handlers."""
        if proto in ('http', 'https'):
            # XXX http[s] protocols are special-cased
            dict = self.handle_error['http'] # https is not different than http
            proto = args[2]  # YUCK!
            meth_name = 'http_error_%s' % proto
            http_err = 1
            orig_args = args
        else:
            dict = self.handle_error
            meth_name = proto + '_error'
            http_err = 0
        args = (dict, proto, meth_name) + args
        result = self._call_chain(*args)
        if result:
            return result

        if http_err:
            args = (dict, 'default', 'http_error_default') + orig_args
            return self._call_chain(*args)
437 # XXX probably also want an abstract factory that knows when it makes
438 # sense to skip a superclass in favor of a subclass and when it might
439 # make sense to include both
def build_opener(*handlers):
    """Create an opener object from a list of handlers.

    The opener will use several default handlers, including support
    for HTTP, FTP and when applicable, HTTPS.

    If any of the handlers passed as arguments are subclasses of the
    default handlers, the default handlers will not be used.
    """
    import types
    def isclass(obj):
        return isinstance(obj, (types.ClassType, type))

    opener = OpenerDirector()
    default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
                       HTTPDefaultErrorHandler, HTTPRedirectHandler,
                       FTPHandler, FileHandler, HTTPErrorProcessor]
    if hasattr(httplib, 'HTTPS'):
        default_classes.append(HTTPSHandler)
    skip = set()
    # drop any default whose job is taken over by a user-supplied handler
    for klass in default_classes:
        for check in handlers:
            if isclass(check):
                if issubclass(check, klass):
                    skip.add(klass)
            elif isinstance(check, klass):
                skip.add(klass)
    for klass in skip:
        default_classes.remove(klass)

    for klass in default_classes:
        opener.add_handler(klass())

    for h in handlers:
        if isclass(h):
            h = h()
        opener.add_handler(h)
    return opener
def add_parent(self, parent):
    """Record the OpenerDirector that owns this handler."""
    self.parent = parent

def close(self):
    # Only exists for backwards compatibility
    pass
def __lt__(self, other):
    """Order handlers by handler_order so bisect.insort keeps chains sorted."""
    if not hasattr(other, "handler_order"):
        # Try to preserve the old behavior of having custom classes
        # inserted after default ones (works only for custom user
        # classes which are not aware of handler_order).
        return True
    return self.handler_order < other.handler_order
class HTTPErrorProcessor(BaseHandler):
    """Process HTTP error responses."""
    handler_order = 1000  # after all other processing

    def http_response(self, request, response):
        """Turn non-2xx responses into calls on the director's error chain."""
        code, msg, hdrs = response.code, response.msg, response.info()

        # According to RFC 2616, "2xx" code indicates that the client's
        # request was successfully received, understood, and accepted.
        if not (200 <= code < 300):
            response = self.parent.error(
                'http', request, response, code, msg, hdrs)

        return response

    https_response = http_response
class HTTPDefaultErrorHandler(BaseHandler):
    """Last-resort error handler: raise any unhandled HTTP error."""
    def http_error_default(self, req, fp, code, msg, hdrs):
        raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
class HTTPRedirectHandler(BaseHandler):
    # maximum number of redirections to any single URL
    # this is needed because of the state that cookies introduce
    max_repeats = 4
    # maximum total number of redirections (regardless of URL) before
    # assuming we're in a loop
    max_redirections = 10

    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """Return a Request or None in response to a redirect.

        This is called by the http_error_30x methods when a
        redirection response is received.  If a redirection should
        take place, return a new Request to allow http_error_30x to
        perform the redirect.  Otherwise, raise HTTPError if no-one
        else should try to handle this url.  Return None if you can't
        but another Handler might.
        """
        m = req.get_method()
        if (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
            or code in (301, 302, 303) and m == "POST"):
            # Strictly (according to RFC 2616), 301 or 302 in response
            # to a POST MUST NOT cause a redirection without confirmation
            # from the user (of urllib2, in this case).  In practice,
            # essentially all clients do redirect in this case, so we do too.
            # be conciliant with URIs containing a space
            newurl = newurl.replace(' ', '%20')
            newheaders = dict((k, v) for k, v in req.headers.items()
                              if k.lower() not in ("content-length", "content-type")
                              )
            return Request(newurl,
                           headers=newheaders,
                           origin_req_host=req.get_origin_req_host(),
                           unverifiable=True)
        else:
            raise HTTPError(req.get_full_url(), code, msg, headers, fp)

    # Implementation note: To avoid the server sending us into an
    # infinite loop, the request object needs to track what URLs we
    # have already seen.  Do this by adding a handler-specific
    # attribute to the Request object.
    def http_error_302(self, req, fp, code, msg, headers):
        # Some servers (incorrectly) return multiple Location headers
        # (so probably same goes for URI).  Use first header.
        if 'location' in headers:
            newurl = headers.getheaders('location')[0]
        elif 'uri' in headers:
            newurl = headers.getheaders('uri')[0]
        else:
            return

        # fix a possible malformed URL
        urlparts = urlparse.urlparse(newurl)
        if not urlparts.path:
            urlparts = list(urlparts)
            urlparts[2] = "/"
        newurl = urlparse.urlunparse(urlparts)

        newurl = urlparse.urljoin(req.get_full_url(), newurl)

        # XXX Probably want to forget about the state of the current
        # request, although that might interact poorly with other
        # handlers that also use handler-specific request attributes
        new = self.redirect_request(req, fp, code, msg, headers, newurl)
        if new is None:
            return

        # loop detection
        # .redirect_dict has a key url if url was previously visited.
        if hasattr(req, 'redirect_dict'):
            visited = new.redirect_dict = req.redirect_dict
            if (visited.get(newurl, 0) >= self.max_repeats or
                len(visited) >= self.max_redirections):
                raise HTTPError(req.get_full_url(), code,
                                self.inf_msg + msg, headers, fp)
        else:
            visited = new.redirect_dict = req.redirect_dict = {}
        visited[newurl] = visited.get(newurl, 0) + 1

        # Don't close the fp until we are sure that we won't use it
        # with HTTPError.
        fp.read()
        fp.close()

        return self.parent.open(new, timeout=req.timeout)

    http_error_301 = http_error_303 = http_error_307 = http_error_302

    inf_msg = "The HTTP server returned a redirect error that would " \
              "lead to an infinite loop.\n" \
              "The last 30x error message was:\n"
def _parse_proxy(proxy):
    """Return (scheme, user, password, host/port) given a URL or an authority.

    If a URL is supplied, it must have an authority (host:port) component.
    According to RFC 3986, having an authority component means the URL must
    have two slashes after the scheme:

    >>> _parse_proxy('file:/ftp.example.com/')
    Traceback (most recent call last):
    ValueError: proxy URL with no authority: 'file:/ftp.example.com/'

    The first three items of the returned tuple may be None.

    Examples of authority parsing:

    >>> _parse_proxy('proxy.example.com')
    (None, None, None, 'proxy.example.com')
    >>> _parse_proxy('proxy.example.com:3128')
    (None, None, None, 'proxy.example.com:3128')

    The authority component may optionally include userinfo (assumed to be
    username:password):

    >>> _parse_proxy('joe:password@proxy.example.com')
    (None, 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('joe:password@proxy.example.com:3128')
    (None, 'joe', 'password', 'proxy.example.com:3128')

    Same examples, but with URLs instead:

    >>> _parse_proxy('http://proxy.example.com/')
    ('http', None, None, 'proxy.example.com')
    >>> _parse_proxy('http://proxy.example.com:3128/')
    ('http', None, None, 'proxy.example.com:3128')
    >>> _parse_proxy('http://joe:password@proxy.example.com/')
    ('http', 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('http://joe:password@proxy.example.com:3128')
    ('http', 'joe', 'password', 'proxy.example.com:3128')

    Everything after the authority is ignored:

    >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128')
    ('ftp', 'joe', 'password', 'proxy.example.com')

    Test for no trailing '/' case:

    >>> _parse_proxy('http://joe:password@proxy.example.com')
    ('http', 'joe', 'password', 'proxy.example.com')

    """
    scheme, r_scheme = splittype(proxy)
    if not r_scheme.startswith("/"):
        # authority
        scheme = None
        authority = proxy
    else:
        # URL
        if not r_scheme.startswith("//"):
            raise ValueError("proxy URL with no authority: %r" % proxy)
        # We have an authority, so for RFC 3986-compliant URLs (by ss 3.2.,
        # and 3.3.), path is empty or starts with '/'
        end = r_scheme.find("/", 2)
        if end == -1:
            end = None
        authority = r_scheme[2:end]
    userinfo, hostport = splituser(authority)
    if userinfo is not None:
        user, password = splitpasswd(userinfo)
    else:
        user = password = None
    return scheme, user, password, hostport
class ProxyHandler(BaseHandler):
    """Reroute requests through configured proxies."""
    # Proxies must be in front
    handler_order = 100

    def __init__(self, proxies=None):
        if proxies is None:
            proxies = getproxies()
        assert hasattr(proxies, 'has_key'), "proxies must be a mapping"
        self.proxies = proxies
        # install a <scheme>_open method for every configured scheme;
        # defaults bind the per-scheme values into each lambda
        for type, url in proxies.items():
            setattr(self, '%s_open' % type,
                    lambda r, proxy=url, type=type, meth=self.proxy_open: \
                    meth(r, proxy, type))

    def proxy_open(self, req, proxy, type):
        orig_type = req.get_type()
        proxy_type, user, password, hostport = _parse_proxy(proxy)

        if proxy_type is None:
            proxy_type = orig_type

        if req.host and proxy_bypass(req.host):
            return None

        if user and password:
            user_pass = '%s:%s' % (unquote(user), unquote(password))
            creds = base64.b64encode(user_pass).strip()
            req.add_header('Proxy-authorization', 'Basic ' + creds)
        hostport = unquote(hostport)
        req.set_proxy(hostport, proxy_type)

        if orig_type == proxy_type or orig_type == 'https':
            # let other handlers take care of it
            return None
        else:
            # need to start over, because the other handlers don't
            # grok the proxy's URL type
            # e.g. if we have a constructor arg proxies like so:
            # {'http': 'ftp://proxy.example.com'}, we may end up turning
            # a request for http://acme.example.com/a into one for
            # ftp://proxy.example.com/a
            return self.parent.open(req, timeout=req.timeout)
class HTTPPasswordMgr:
    """Store and look up (user, password) pairs keyed by realm and URI."""

    def __init__(self):
        # realm -> {tuple of reduced URIs -> (user, password)}
        self.passwd = {}

    def add_password(self, realm, uri, user, passwd):
        # uri could be a single URI or a sequence
        if isinstance(uri, basestring):
            uri = [uri]
        if not realm in self.passwd:
            self.passwd[realm] = {}
        # index under both default-port and literal forms of the authority
        for default_port in True, False:
            reduced_uri = tuple(
                [self.reduce_uri(u, default_port) for u in uri])
            self.passwd[realm][reduced_uri] = (user, passwd)

    def find_user_password(self, realm, authuri):
        domains = self.passwd.get(realm, {})
        for default_port in True, False:
            reduced_authuri = self.reduce_uri(authuri, default_port)
            for uris, authinfo in domains.iteritems():
                for uri in uris:
                    if self.is_suburi(uri, reduced_authuri):
                        return authinfo
        return None, None

    def reduce_uri(self, uri, default_port=True):
        """Accept authority or URI and extract only the authority and path."""
        # note HTTP URLs do not have a userinfo component
        parts = urlparse.urlsplit(uri)
        if parts[1]:
            # URI
            scheme = parts[0]
            authority = parts[1]
            path = parts[2] or '/'
        else:
            # host or host:port
            scheme = None
            authority = uri
            path = '/'
        host, port = splitport(authority)
        if default_port and port is None and scheme is not None:
            dport = {"http": 80,
                     "https": 443,
                     }.get(scheme)
            if dport is not None:
                authority = "%s:%d" % (host, dport)
        return authority, path

    def is_suburi(self, base, test):
        """Check if test is below base in a URI tree

        Both args must be URIs in reduced form.
        """
        if base == test:
            return True
        if base[0] != test[0]:
            return False
        common = posixpath.commonprefix((base[1], test[1]))
        if len(common) == len(base[1]):
            return True
        return False
class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):
    """Password manager that falls back to a None (wildcard) realm."""

    def find_user_password(self, realm, authuri):
        user, password = HTTPPasswordMgr.find_user_password(self, realm,
                                                            authuri)
        if user is not None:
            return user, password
        # no realm-specific match: try credentials registered for any realm
        return HTTPPasswordMgr.find_user_password(self, None, authuri)
class AbstractBasicAuthHandler:
    """Shared machinery for HTTP and proxy Basic authentication."""

    # XXX this allows for multiple auth-schemes, but will stupidly pick
    # the last one with a realm specified.

    # allow for double- and single-quoted realm values
    # (single quotes are a violation of the RFC, but appear in the wild)
    rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
                    'realm=(["\'])(.*?)\\2', re.I)

    # XXX could pre-emptively send auth info already accepted (RFC 2617,
    # end of section 2, and section 1.2 immediately after "credentials"
    # production).

    def __init__(self, password_mgr=None):
        if password_mgr is None:
            password_mgr = HTTPPasswordMgr()
        self.passwd = password_mgr
        self.add_password = self.passwd.add_password

    def http_error_auth_reqed(self, authreq, host, req, headers):
        # host may be an authority (without userinfo) or a URL with an
        # authority
        # XXX could be multiple headers
        authreq = headers.get(authreq, None)
        if authreq:
            mo = AbstractBasicAuthHandler.rx.search(authreq)
            if mo:
                scheme, quote, realm = mo.groups()
                if scheme.lower() == 'basic':
                    return self.retry_http_basic_auth(host, req, realm)

    def retry_http_basic_auth(self, host, req, realm):
        user, pw = self.passwd.find_user_password(realm, host)
        if pw is not None:
            raw = "%s:%s" % (user, pw)
            auth = 'Basic %s' % base64.b64encode(raw).strip()
            if req.headers.get(self.auth_header, None) == auth:
                # same credentials already failed once; give up
                return None
            req.add_header(self.auth_header, auth)
            return self.parent.open(req, timeout=req.timeout)
        else:
            return None
class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Handle 401 responses by retrying with Basic credentials."""

    auth_header = 'Authorization'

    def http_error_401(self, req, fp, code, msg, headers):
        url = req.get_full_url()
        return self.http_error_auth_reqed('www-authenticate',
                                          url, req, headers)
class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Handle 407 responses by retrying with proxy Basic credentials."""

    auth_header = 'Proxy-authorization'

    def http_error_407(self, req, fp, code, msg, headers):
        # http_error_auth_reqed requires that there is no userinfo component in
        # authority.  Assume there isn't one, since urllib2 does not (and
        # should not, RFC 3986 s. 3.2.1) support requests for URLs containing
        # userinfo.
        authority = req.get_host()
        return self.http_error_auth_reqed('proxy-authenticate',
                                          authority, req, headers)
873 """Return n random bytes."""
874 # Use /dev/urandom if it is available. Fall back to random module
875 # if not. It might be worthwhile to extend this function to use
876 # other platform-specific mechanisms for getting random bytes.
877 if os
.path
.exists("/dev/urandom"):
878 f
= open("/dev/urandom")
883 L
= [chr(random
.randrange(0, 256)) for i
in range(n
)]
class AbstractDigestAuthHandler:
    """Shared machinery for HTTP and proxy Digest authentication."""
    # Digest authentication is specified in RFC 2617.

    # XXX The client does not inspect the Authentication-Info header
    # in a successful response.

    # XXX It should be possible to test this implementation against
    # a mock server that just generates a static set of challenges.

    # XXX qop="auth-int" supports is shaky

    def __init__(self, passwd=None):
        if passwd is None:
            passwd = HTTPPasswordMgr()
        self.passwd = passwd
        self.add_password = self.passwd.add_password
        self.retried = 0
        self.nonce_count = 0
        self.last_nonce = None

    def reset_retry_count(self):
        self.retried = 0

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        authreq = headers.get(auth_header, None)
        if self.retried > 5:
            # Don't fail endlessly - if we failed once, we'll probably
            # fail a second time. Hm. Unless the Password Manager is
            # prompting for the information. Crap. This isn't great
            # but it's better than the current 'repeat until recursion
            # depth exceeded' approach <wink>
            raise HTTPError(req.get_full_url(), 401, "digest auth failed",
                            headers, None)
        else:
            self.retried += 1
        if authreq:
            scheme = authreq.split()[0]
            if scheme.lower() == 'digest':
                return self.retry_http_digest_auth(req, authreq)

    def retry_http_digest_auth(self, req, auth):
        token, challenge = auth.split(' ', 1)
        chal = parse_keqv_list(parse_http_list(challenge))
        auth = self.get_authorization(req, chal)
        if auth:
            auth_val = 'Digest %s' % auth
            if req.headers.get(self.auth_header, None) == auth_val:
                # same credentials already failed once; give up
                return None
            req.add_unredirected_header(self.auth_header, auth_val)
            resp = self.parent.open(req, timeout=req.timeout)
            return resp

    def get_cnonce(self, nonce):
        # The cnonce-value is an opaque
        # quoted string value provided by the client and used by both client
        # and server to avoid chosen plaintext attacks, to provide mutual
        # authentication, and to provide some message integrity protection.
        # This isn't a fabulous effort, but it's probably Good Enough.
        dig = hashlib.sha1("%s:%s:%s:%s" % (self.nonce_count, nonce, time.ctime(),
                                            randombytes(8))).hexdigest()
        return dig[:16]

    def get_authorization(self, req, chal):
        """Build the Digest Authorization header value from challenge *chal*,
        or return None if required fields/credentials are missing."""
        try:
            realm = chal['realm']
            nonce = chal['nonce']
            qop = chal.get('qop')
            algorithm = chal.get('algorithm', 'MD5')
            # mod_digest doesn't send an opaque, even though it isn't
            # supposed to be optional
            opaque = chal.get('opaque', None)
        except KeyError:
            return None

        H, KD = self.get_algorithm_impls(algorithm)
        if H is None:
            return None

        user, pw = self.passwd.find_user_password(realm, req.get_full_url())
        if user is None:
            return None

        # XXX not implemented yet
        if req.has_data():
            entdig = self.get_entity_digest(req.get_data(), chal)
        else:
            entdig = None

        A1 = "%s:%s:%s" % (user, realm, pw)
        A2 = "%s:%s" % (req.get_method(),
                        # XXX selector: what about proxies and full urls
                        req.get_selector())
        if qop == 'auth':
            if nonce == self.last_nonce:
                self.nonce_count += 1
            else:
                self.nonce_count = 1
                self.last_nonce = nonce

            ncvalue = '%08x' % self.nonce_count
            cnonce = self.get_cnonce(nonce)
            noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
            respdig = KD(H(A1), noncebit)
        elif qop is None:
            respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
        else:
            # XXX handle auth-int.
            raise URLError("qop '%s' is not supported." % qop)

        # XXX should the partial digests be encoded too?

        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (user, realm, nonce, req.get_selector(),
                                  respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if entdig:
            base += ', digest="%s"' % entdig
        base += ', algorithm="%s"' % algorithm
        if qop:
            base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
        return base

    def get_algorithm_impls(self, algorithm):
        """Return (H, KD) hash callables for *algorithm*, per RFC 2617."""
        # algorithm should be case-insensitive according to RFC2617
        algorithm = algorithm.upper()
        # lambdas assume digest modules are imported at the top level
        if algorithm == 'MD5':
            H = lambda x: hashlib.md5(x).hexdigest()
        elif algorithm == 'SHA':
            H = lambda x: hashlib.sha1(x).hexdigest()
        # XXX MD5-sess
        KD = lambda s, d: H("%s:%s" % (s, d))
        return H, KD

    def get_entity_digest(self, data, chal):
        # XXX not implemented yet
        return None
class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """An authentication protocol defined by RFC 2069

    Digest authentication improves on basic authentication because it
    does not transmit passwords in the clear.
    """

    auth_header = 'Authorization'
    handler_order = 490  # before Basic auth

    def http_error_401(self, req, fp, code, msg, headers):
        host = urlparse.urlparse(req.get_full_url())[1]
        retry = self.http_error_auth_reqed('www-authenticate',
                                           host, req, headers)
        self.reset_retry_count()
        return retry
class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """Handle 407 responses by retrying with proxy Digest credentials."""

    auth_header = 'Proxy-Authorization'
    handler_order = 490  # before Basic auth

    def http_error_407(self, req, fp, code, msg, headers):
        host = req.get_host()
        retry = self.http_error_auth_reqed('proxy-authenticate',
                                           host, req, headers)
        self.reset_retry_count()
        return retry
class AbstractHTTPHandler(BaseHandler):
    """Common request preparation and connection handling for HTTP(S)."""

    def __init__(self, debuglevel=0):
        self._debuglevel = debuglevel

    def set_http_debuglevel(self, level):
        self._debuglevel = level

    def do_request_(self, request):
        """Fill in default headers (Content-type/length, Host, parent's
        addheaders) on *request* and return it."""
        host = request.get_host()
        if not host:
            raise URLError('no host given')

        if request.has_data():  # POST
            data = request.get_data()
            if not request.has_header('Content-type'):
                request.add_unredirected_header(
                    'Content-type',
                    'application/x-www-form-urlencoded')
            if not request.has_header('Content-length'):
                request.add_unredirected_header(
                    'Content-length', '%d' % len(data))

        sel_host = host
        if request.has_proxy():
            # when proxying, Host must name the origin server, which is
            # embedded in the (absolute-URI) selector
            scheme, sel = splittype(request.get_selector())
            sel_host, sel_path = splithost(sel)

        if not request.has_header('Host'):
            request.add_unredirected_header('Host', sel_host)
        for name, value in self.parent.addheaders:
            name = name.capitalize()
            if not request.has_header(name):
                request.add_unredirected_header(name, value)

        return request

    def do_open(self, http_class, req):
        """Return an addinfourl object for the request, using http_class.

        http_class must implement the HTTPConnection API from httplib.
        The addinfourl return value is a file-like object.  It also
        has methods and attributes including:
            - info(): return a mimetools.Message object for the headers
            - geturl(): return the original request URL
            - code: HTTP status code
        """
        host = req.get_host()
        if not host:
            raise URLError('no host given')

        h = http_class(host, timeout=req.timeout)  # will parse host:port
        h.set_debuglevel(self._debuglevel)

        headers = dict(req.headers)
        headers.update(req.unredirected_hdrs)
        # We want to make an HTTP/1.1 request, but the addinfourl
        # class isn't prepared to deal with a persistent connection.
        # It will try to read all remaining data from the socket,
        # which will block while the server waits for the next request.
        # So make sure the connection gets closed after the (only)
        # request.
        headers["Connection"] = "close"
        headers = dict(
            (name.title(), val) for name, val in headers.items())

        if req._tunnel_host:
            tunnel_headers = {}
            proxy_auth_hdr = "Proxy-Authorization"
            if proxy_auth_hdr in headers:
                tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                # Proxy-Authorization should not be sent to origin
                # server.
                del headers[proxy_auth_hdr]
            h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

        try:
            h.request(req.get_method(), req.get_selector(), req.data, headers)
            try:
                r = h.getresponse(buffering=True)
            except TypeError:  # buffering kw not supported
                r = h.getresponse()
        except socket.error as err:  # XXX what error?
            raise URLError(err)

        # Pick apart the HTTPResponse object to get the addinfourl
        # object initialized properly.

        # Wrap the HTTPResponse object in socket's file object adapter
        # for Windows.  That adapter calls recv(), so delegate recv()
        # to read().  This weird wrapping allows the returned object to
        # have readline() and readlines() methods.

        # XXX It might be better to extract the read buffering code
        # out of socket._fileobject() and into a base class.
        r.recv = r.read
        fp = socket._fileobject(r, close=True)

        resp = addinfourl(fp, r.msg, req.get_full_url())
        resp.code = r.status
        resp.msg = r.reason
        return resp
class HTTPHandler(AbstractHTTPHandler):
    """Open 'http' URLs; all real work happens in AbstractHTTPHandler."""

    def http_open(self, req):
        # Delegate to do_open with a plain httplib connection class.
        return self.do_open(httplib.HTTPConnection, req)

    # Request pre-processing is the shared AbstractHTTPHandler logic.
    http_request = AbstractHTTPHandler.do_request_
if hasattr(httplib, 'HTTPS'):
    # HTTPSHandler exists only when httplib was built with SSL support.
    class HTTPSHandler(AbstractHTTPHandler):
        """Open 'https' URLs; mirrors HTTPHandler but uses HTTPSConnection."""

        def https_open(self, req):
            return self.do_open(httplib.HTTPSConnection, req)

        https_request = AbstractHTTPHandler.do_request_
class HTTPCookieProcessor(BaseHandler):
    """Handler that stores and replays HTTP cookies via a CookieJar."""

    def __init__(self, cookiejar=None):
        import cookielib
        if cookiejar is None:
            cookiejar = cookielib.CookieJar()
        self.cookiejar = cookiejar

    def http_request(self, request):
        # Attach any stored cookies that match the outgoing request.
        self.cookiejar.add_cookie_header(request)
        # Handler protocol: pre-processors must return the request.
        return request

    def http_response(self, request, response):
        # Harvest Set-Cookie headers from the response for later reuse.
        self.cookiejar.extract_cookies(response, request)
        # Handler protocol: post-processors must return the response.
        return response

    https_request = http_request
    https_response = http_response
class UnknownHandler(BaseHandler):
    """Fallback handler that rejects URL schemes nobody else handled."""

    def unknown_open(self, req):
        # Local renamed from 'type' to avoid shadowing the builtin.
        url_type = req.get_type()
        raise URLError('unknown url type: %s' % url_type)
def parse_keqv_list(l):
    """Parse list of key=value strings where keys are not duplicated."""
    parsed = {}
    for elt in l:
        # Split on the first '=' only, so values may contain '='.
        k, v = elt.split('=', 1)
        # Strip one level of surrounding double quotes from the value.
        if v[0] == '"' and v[-1] == '"':
            v = v[1:-1]
        parsed[k] = v
    return parsed
def parse_http_list(s):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  A non-quoted string could have quotes in the
    middle.  Neither commas nor quotes count if they are escaped.

    Only double-quotes count, not single-quotes.
    """
    res = []
    part = ''

    escape = quote = False
    for cur in s:
        if escape:
            # Previous char was a backslash inside quotes: take this
            # char literally.
            part += cur
            escape = False
            continue
        if quote:
            if cur == '\\':
                escape = True
                continue
            elif cur == '"':
                quote = False
            part += cur
            continue

        if cur == ',':
            # Unquoted comma ends the current list element.
            res.append(part)
            part = ''
            continue

        if cur == '"':
            quote = True

        part += cur

    # append last part
    if part:
        res.append(part)

    return [part.strip() for part in res]
1252 class FileHandler(BaseHandler
):
1253 # Use local file or FTP depending on form of URL
1254 def file_open(self
, req
):
1255 url
= req
.get_selector()
1256 if url
[:2] == '//' and url
[2:3] != '/':
1258 return self
.parent
.open(req
)
1260 return self
.open_local_file(req
)
1262 # names for the localhost
1264 def get_names(self
):
1265 if FileHandler
.names
is None:
1267 FileHandler
.names
= tuple(
1268 socket
.gethostbyname_ex('localhost')[2] +
1269 socket
.gethostbyname_ex(socket
.gethostname())[2])
1270 except socket
.gaierror
:
1271 FileHandler
.names
= (socket
.gethostbyname('localhost'),)
1272 return FileHandler
.names
1274 # not entirely sure what the rules are here
1275 def open_local_file(self
, req
):
1278 host
= req
.get_host()
1279 file = req
.get_selector()
1280 localfile
= url2pathname(file)
1282 stats
= os
.stat(localfile
)
1283 size
= stats
.st_size
1284 modified
= email
.utils
.formatdate(stats
.st_mtime
, usegmt
=True)
1285 mtype
= mimetypes
.guess_type(file)[0]
1286 headers
= mimetools
.Message(StringIO(
1287 'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
1288 (mtype
or 'text/plain', size
, modified
)))
1290 host
, port
= splitport(host
)
1292 (not port
and socket
.gethostbyname(host
) in self
.get_names()):
1293 return addinfourl(open(localfile
, 'rb'),
1294 headers
, 'file:'+file)
1295 except OSError, msg
:
1296 # urllib2 users shouldn't expect OSErrors coming from urlopen()
1298 raise URLError('file not on local host')
1300 class FTPHandler(BaseHandler
):
1301 def ftp_open(self
, req
):
1304 host
= req
.get_host()
1306 raise URLError('ftp error: no host given')
1307 host
, port
= splitport(host
)
1309 port
= ftplib
.FTP_PORT
1313 # username/password handling
1314 user
, host
= splituser(host
)
1316 user
, passwd
= splitpasswd(user
)
1319 host
= unquote(host
)
1320 user
= unquote(user
or '')
1321 passwd
= unquote(passwd
or '')
1324 host
= socket
.gethostbyname(host
)
1325 except socket
.error
, msg
:
1327 path
, attrs
= splitattr(req
.get_selector())
1328 dirs
= path
.split('/')
1329 dirs
= map(unquote
, dirs
)
1330 dirs
, file = dirs
[:-1], dirs
[-1]
1331 if dirs
and not dirs
[0]:
1334 fw
= self
.connect_ftp(user
, passwd
, host
, port
, dirs
, req
.timeout
)
1335 type = file and 'I' or 'D'
1337 attr
, value
= splitvalue(attr
)
1338 if attr
.lower() == 'type' and \
1339 value
in ('a', 'A', 'i', 'I', 'd', 'D'):
1340 type = value
.upper()
1341 fp
, retrlen
= fw
.retrfile(file, type)
1343 mtype
= mimetypes
.guess_type(req
.get_full_url())[0]
1345 headers
+= "Content-type: %s\n" % mtype
1346 if retrlen
is not None and retrlen
>= 0:
1347 headers
+= "Content-length: %d\n" % retrlen
1348 sf
= StringIO(headers
)
1349 headers
= mimetools
.Message(sf
)
1350 return addinfourl(fp
, headers
, req
.get_full_url())
1351 except ftplib
.all_errors
, msg
:
1352 raise URLError
, ('ftp error: %s' % msg
), sys
.exc_info()[2]
1354 def connect_ftp(self
, user
, passwd
, host
, port
, dirs
, timeout
):
1355 fw
= ftpwrapper(user
, passwd
, host
, port
, dirs
, timeout
)
1356 ## fw.ftp.set_debuglevel(1)
1359 class CacheFTPHandler(FTPHandler
):
1360 # XXX would be nice to have pluggable cache strategies
1361 # XXX this stuff is definitely not thread safe
1369 def setTimeout(self
, t
):
1372 def setMaxConns(self
, m
):
1375 def connect_ftp(self
, user
, passwd
, host
, port
, dirs
, timeout
):
1376 key
= user
, host
, port
, '/'.join(dirs
), timeout
1377 if key
in self
.cache
:
1378 self
.timeout
[key
] = time
.time() + self
.delay
1380 self
.cache
[key
] = ftpwrapper(user
, passwd
, host
, port
, dirs
, timeout
)
1381 self
.timeout
[key
] = time
.time() + self
.delay
1383 return self
.cache
[key
]
1385 def check_cache(self
):
1386 # first check for old ones
1388 if self
.soonest
<= t
:
1389 for k
, v
in self
.timeout
.items():
1391 self
.cache
[k
].close()
1394 self
.soonest
= min(self
.timeout
.values())
1396 # then check the size
1397 if len(self
.cache
) == self
.max_conns
:
1398 for k
, v
in self
.timeout
.items():
1399 if v
== self
.soonest
:
1403 self
.soonest
= min(self
.timeout
.values())