mirror of
https://github.com/python/cpython.git
synced 2025-07-18 08:45:20 +00:00
SF patch #911431: robot.txt must be robots.txt
(Contributed by George Yoshida.)
This commit is contained in:
parent
3aa82c07f7
commit
2d95f1ad57
2 changed files with 3 additions and 3 deletions
|
@@ -83,7 +83,7 @@ class RobotFileParser:
         self.entries.append(entry)

     def parse(self, lines):
-        """parse the input lines from a robot.txt file.
+        """parse the input lines from a robots.txt file.
         We allow that a user-agent: line is not preceded by
         one or more blank lines."""
         state = 0
|
@@ -148,7 +148,7 @@ class RobotFileParser:

     def can_fetch(self, useragent, url):
         """using the parsed robots.txt decide if useragent can fetch url"""
-        _debug("Checking robot.txt allowance for:\n user agent: %s\n url: %s" %
+        _debug("Checking robots.txt allowance for:\n user agent: %s\n url: %s" %
               (useragent, url))
        if self.disallow_all:
            return False
|
Loading…
Add table
Add a link
Reference in a new issue