""" robotparser.py

    Copyright (C) 2000  Bastian Kleineidam

    You can choose between two licenses when using this package:
    1) GNU GPLv2
    2) PSF license for Python 2.2

    The robots.txt Exclusion Protocol is implemented as specified in
    http://info.webcrawler.com/mak/projects/robots/norobots-rfc.html
"""
import urlparse
import urllib

__all__ = ["RobotFileParser"]


class RobotFileParser:
    """ This class provides a set of methods to read, parse and answer
    questions about a single robots.txt file.

    """
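    # Typical use, as a rough sketch (the host name and user agent below are
    # purely illustrative and not part of this module):
    #
    #     rp = RobotFileParser()
    #     rp.set_url("http://www.example.com/robots.txt")
    #     rp.read()
    #     if rp.can_fetch("MyCrawler/1.0", "http://www.example.com/page.html"):
    #         ...  # go ahead and fetch the page
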
    def __init__(self, url=''):
        self.entries = []
        self.default_entry = None
        self.disallow_all = False
        self.allow_all = False
        self.set_url(url)
        self.last_checked = 0

    def mtime(self):
        """Returns the time the robots.txt file was last fetched.

        This is useful for long-running web spiders that need to
        check for new robots.txt files periodically.

        """
        return self.last_checked

    def modified(self):
        """Sets the time the robots.txt file was last fetched to the
        current time.

        """
        import time
        self.last_checked = time.time()

    def set_url(self, url):
        """Sets the URL referring to a robots.txt file."""
        self.url = url
        self.host, self.path = urlparse.urlparse(url)[1:3]

    def read(self):
        """Reads the robots.txt URL and feeds it to the parser."""
        opener = URLopener()
        f = opener.open(self.url)
        lines = [line.strip() for line in f]
        f.close()
        self.errcode = opener.errcode
        if self.errcode in (401, 403):
            self.disallow_all = True
        elif self.errcode >= 400:
            self.allow_all = True
        elif self.errcode == 200 and lines:
            self.parse(lines)

    def _add_entry(self, entry):
        if "*" in entry.useragents:
            # the default entry is considered last
            self.default_entry = entry
        else:
            self.entries.append(entry)

    def parse(self, lines):
        """parse the input lines from a robots.txt file.
           We allow that a user-agent: line is not preceded by
           one or more blank lines."""
        # states:
        #   0: start state
        #   1: saw user-agent line
        #   2: saw an allow or disallow line
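        # As a concrete example (illustrative, not from the original source),
        # parse() turns input like the following into two Entry objects: one
        # for "SpecificBot" and one default entry for "*":
        #
        #     User-agent: SpecificBot
        #     Disallow: /private/
        #
        #     User-agent: *
        #     Disallow: /tmp/
        #     Allow: /tmp/public/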
        state = 0
        linenumber = 0
        entry = Entry()

        for line in lines:
            linenumber = linenumber + 1
            if not line:
                if state == 1:
                    entry = Entry()
                    state = 0
                elif state == 2:
                    self._add_entry(entry)
                    entry = Entry()
                    state = 0
            # remove optional comment and strip line
            i = line.find('#')
            if i >= 0:
                line = line[:i]
            line = line.strip()
            if not line:
                continue
            line = line.split(':', 1)
            if len(line) == 2:
                line[0] = line[0].strip().lower()
                line[1] = urllib.unquote(line[1].strip())
                if line[0] == "user-agent":
                    if state == 2:
                        self._add_entry(entry)
                        entry = Entry()
                    entry.useragents.append(line[1])
                    state = 1
                elif line[0] == "disallow":
                    if state != 0:
                        entry.rulelines.append(RuleLine(line[1], False))
                        state = 2
                elif line[0] == "allow":
                    if state != 0:
                        entry.rulelines.append(RuleLine(line[1], True))
                        state = 2
        if state == 2:
            self.entries.append(entry)

    def can_fetch(self, useragent, url):
        """using the parsed robots.txt decide if useragent can fetch url"""
        if self.disallow_all:
            return False
        if self.allow_all:
            return True
        # search for given user agent matches
        # the first match counts
        url = urllib.quote(urlparse.urlparse(urllib.unquote(url))[2]) or "/"
        for entry in self.entries:
            if entry.applies_to(useragent):
                return entry.allowance(url)
        # try the default entry last
        if self.default_entry:
            return self.default_entry.allowance(url)
        # agent not found ==> access granted
        return True
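    # For illustration (the values below are hypothetical, not from the
    # original source): can_fetch() reduces the URL to its quoted path before
    # matching, so "http://www.example.com/a%20b/c.html" is compared against
    # the rule lines as "/a%20b/c.html", and a bare host URL is compared
    # simply as "/".
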
    def __str__(self):
        return ''.join([str(entry) + "\n" for entry in self.entries])


class RuleLine:
    """A rule line is a single "Allow:" (allowance==True) or "Disallow:"
       (allowance==False) followed by a path."""
    def __init__(self, path, allowance):
        if path == '' and not allowance:
            # an empty value means allow all
            allowance = True
        self.path = urllib.quote(path)
        self.allowance = allowance

    def applies_to(self, filename):
        return self.path == "*" or filename.startswith(self.path)

    def __str__(self):
        return (self.allowance and "Allow" or "Disallow") + ": " + self.path

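# A short note on matching, with an illustrative path that is not part of the
# original source: RuleLine.applies_to() is a plain prefix test, so
# RuleLine("/private", False) disallows "/private", "/private/" and
# "/private_data.html" alike, while the special path "*" matches everything.

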
class Entry:
    """An entry has one or more user-agents and zero or more rulelines"""
    def __init__(self):
        self.useragents = []
        self.rulelines = []

    def __str__(self):
        ret = []
        for agent in self.useragents:
            ret.extend(["User-agent: ", agent, "\n"])
        for line in self.rulelines:
            ret.extend([str(line), "\n"])
        return ''.join(ret)

    def applies_to(self, useragent):
        """check if this entry applies to the specified agent"""
        # split the name token and make it lower case
        useragent = useragent.split("/")[0].lower()
        for agent in self.useragents:
            if agent == '*':
                # we have the catch-all agent
                return True
            agent = agent.lower()
            if agent in useragent:
                return True
        return False
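    # Matching is deliberately loose; as an illustration (not taken from the
    # original source), an entry for user-agent "figtree" applies to the
    # header "FigTree/0.1 Robot libwww-perl/5.04", because the version suffix
    # is stripped at "/" and the comparison is a lower-cased substring test.
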
    def allowance(self, filename):
        """Preconditions:
        - our agent applies to this entry
        - filename is URL decoded"""
        for line in self.rulelines:
            if line.applies_to(filename):
                return line.allowance
        return True


class URLopener(urllib.FancyURLopener):
    def __init__(self, *args):
        urllib.FancyURLopener.__init__(self, *args)
        self.errcode = 200

    def prompt_user_passwd(self, host, realm):
        ## If robots.txt file is accessible only with a password,
        ## we act as if the file wasn't there.
        return None, None

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        self.errcode = errcode
        return urllib.FancyURLopener.http_error_default(self, url, fp, errcode,
                                                        errmsg, headers)
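

if __name__ == '__main__':
    # A minimal, illustrative self-test (not part of the original module): the
    # host name and user agent below are hypothetical, and read() needs
    # network access to fetch the remote robots.txt file.
    rp = RobotFileParser()
    rp.set_url('http://www.example.com/robots.txt')
    rp.read()
    print rp.can_fetch('ExampleBot/1.0',
                       'http://www.example.com/some/page.html')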