SF patch #911431: robot.txt must be robots.txt

(Contributed by George Yoshida.)
This commit is contained in:
Raymond Hettinger 2004-03-13 20:27:23 +00:00
parent 3aa82c07f7
commit 2d95f1ad57
2 changed files with 3 additions and 3 deletions

View file

@@ -83,7 +83,7 @@ class RobotFileParser:
self.entries.append(entry)
def parse(self, lines):
"""parse the input lines from a robot.txt file.
"""parse the input lines from a robots.txt file.
We allow that a user-agent: line is not preceded by
one or more blank lines."""
state = 0
@@ -148,7 +148,7 @@ class RobotFileParser:
def can_fetch(self, useragent, url):
"""using the parsed robots.txt decide if useragent can fetch url"""
_debug("Checking robot.txt allowance for:\n user agent: %s\n url: %s" %
_debug("Checking robots.txt allowance for:\n user agent: %s\n url: %s" %
(useragent, url))
if self.disallow_all:
return False