Two new sections. Preliminary.
This commit is contained in:
parent
32abe6f7d0
commit
3c9f936eee
3 changed files with 239 additions and 0 deletions
68
Doc/lib/librobotparser.tex
Normal file
@@ -0,0 +1,68 @@
\section{\module{robotparser} ---
         Parser for \filenq{robots.txt}}

\declaremodule{standard}{robotparser}
\modulesynopsis{Accepts as input a list of lines or a URL that refers to a
                \file{robots.txt} file, parses the file, then builds a
                set of rules from that list and answers questions about
                the fetchability of other URLs.}
\sectionauthor{Skip Montanaro}{skip@mojam.com}

\index{WWW}
\index{World-Wide Web}
\index{URL}
\index{robots.txt}

This module provides a single class, \class{RobotFileParser}, which answers
questions about whether or not a particular user agent can fetch a URL on
the web site that published the \file{robots.txt} file.  For more details on
the structure of \file{robots.txt} files, see
\url{http://info.webcrawler.com/mak/projects/robots/norobots.html}.

\begin{classdesc}{RobotFileParser}{}

This class provides a set of methods to read, parse and answer questions
about a single \file{robots.txt} file.

\begin{methoddesc}{set_url}{url}
Sets the URL referring to a \file{robots.txt} file.
\end{methoddesc}

\begin{methoddesc}{read}{}
Reads the \file{robots.txt} URL and feeds it to the parser.
\end{methoddesc}

\begin{methoddesc}{parse}{lines}
Parses the \var{lines} argument, which should be a list of lines from a
\file{robots.txt} file.
\end{methoddesc}
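
Since \method{read()} simply fetches the file and feeds its lines to the
parser, \method{parse()} can be handed lines obtained by other means.  A
minimal sketch using \module{urllib}, reusing the host from the example at
the end of this section:

\begin{verbatim}
>>> import robotparser, urllib
>>> rp = robotparser.RobotFileParser()
>>> lines = urllib.urlopen("http://www.musi-cal.com/robots.txt").readlines()
>>> rp.parse(lines)
>>> rp.can_fetch("*", "http://www.musi-cal.com/")
1
\end{verbatim}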

\begin{methoddesc}{can_fetch}{useragent, url}
Returns true if the \var{useragent} is allowed to fetch the \var{url}
according to the rules contained in the parsed \file{robots.txt} file.
\end{methoddesc}

\begin{methoddesc}{mtime}{}
Returns the time the \file{robots.txt} file was last fetched.  This is
useful for long-running web spiders that need to check for new
\file{robots.txt} files periodically.
\end{methoddesc}

\begin{methoddesc}{modified}{}
Sets the time the \file{robots.txt} file was last fetched to the current
time.
\end{methoddesc}
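
A long-running spider might combine these two methods to decide when to
re-fetch the file; a minimal sketch (the one-hour threshold is an arbitrary
choice, not anything mandated by the module):

\begin{verbatim}
>>> import time
>>> if time.time() - rp.mtime() > 3600:
...     rp.read()       # fetch a fresh copy of the file
...     rp.modified()   # record when it was fetched
...
\end{verbatim}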

\end{classdesc}

The following example demonstrates basic use of the \class{RobotFileParser}
class.

\begin{verbatim}
>>> import robotparser
>>> rp = robotparser.RobotFileParser()
>>> rp.set_url("http://www.musi-cal.com/robots.txt")
>>> rp.read()
>>> rp.can_fetch("*", "http://www.musi-cal.com/cgi-bin/search?city=San+Francisco")
0
>>> rp.can_fetch("*", "http://www.musi-cal.com/")
1
\end{verbatim}
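
Putting the pieces together, a crawler could guard each of its fetches with
\method{can_fetch()}.  The sketch below does this with a hypothetical
\function{polite_open()} helper (the name is made up for illustration):

\begin{verbatim}
import robotparser
import urllib

rp = robotparser.RobotFileParser()
rp.set_url("http://www.musi-cal.com/robots.txt")
rp.read()

def polite_open(url, useragent="*"):
    # Refuse to fetch anything the parsed robots.txt disallows.
    if not rp.can_fetch(useragent, url):
        raise IOError("fetching disallowed by robots.txt: " + url)
    return urllib.urlopen(url)
\end{verbatim}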