Merged revisions 84597-84599 via svnmerge from

svn+ssh://pythondev@svn.python.org/python/branches/py3k

........
  r84597 | antoine.pitrou | 2010-09-07 22:42:19 +0200 (Tue, 07 Sep 2010) | 5 lines

  Issue #8574: better implementation of test.support.transient_internet().
  Original patch by Victor.
........
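
For context on r84597: transient_internet() is a context manager that turns transient network failures into test skips rather than test failures. A minimal sketch of the idea, assuming a simple errno-based classification (the real helper in test.support covers more cases and conditions):

import contextlib
import errno
import socket
import unittest

@contextlib.contextmanager
def transient_internet(resource_name):
    # Minimal sketch: treat a handful of errno values as transient
    # network conditions and skip the test instead of failing it.
    transient_errnos = frozenset((
        errno.ECONNREFUSED, errno.ECONNRESET,
        errno.EHOSTUNREACH, errno.ENETUNREACH, errno.ETIMEDOUT,
    ))
    try:
        yield
    except socket.timeout as err:
        raise unittest.SkipTest(
            'timed out reaching %s: %s' % (resource_name, err))
    except socket.error as err:
        if getattr(err, 'errno', None) in transient_errnos:
            raise unittest.SkipTest(
                'resource %s is not reachable: %s' % (resource_name, err))
        raise
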
  r84598 | antoine.pitrou | 2010-09-07 23:05:49 +0200 (Tue, 07 Sep 2010) | 6 lines

  Issue #9792: In case of connection failure, socket.create_connection()
  would swallow the exception and raise a new one, making it impossible
  to fetch the original errno, or to filter timeout errors.  Now the
  original error is re-raised.
........
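
The fix in r84598 boils down to remembering the last exception from each connection attempt and re-raising it unchanged, instead of masking it with a generic error. A sketch of the pattern (not the verbatim stdlib code):

import socket

def create_connection_sketch(address, timeout=None):
    # Try each address returned by getaddrinfo(); on total failure,
    # re-raise the *original* error so callers keep its errno and can
    # distinguish timeouts from connection refusals.
    host, port = address
    err = None
    for af, socktype, proto, canonname, sa in socket.getaddrinfo(
            host, port, 0, socket.SOCK_STREAM):
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)
            if timeout is not None:
                sock.settimeout(timeout)
            sock.connect(sa)
            return sock
        except socket.error as exc:
            err = exc                 # remember the original exception
            if sock is not None:
                sock.close()
    if err is not None:
        raise err                     # re-raised unchanged (issue #9792)
    raise socket.error('getaddrinfo returned an empty list')
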
  r84599 | antoine.pitrou | 2010-09-07 23:09:09 +0200 (Tue, 07 Sep 2010) | 4 lines

  Improve transient_internet() again to detect more network errors,
  and use it in test_robotparser. Fixes #8574.
........
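
With the original exception now propagating out of socket.create_connection() (r84598), callers can classify failures precisely. A hypothetical helper illustrating what "more network errors" might cover, such as DNS failures and timeouts (classify_network_error and its policy are assumptions for illustration, not the stdlib API):

import socket
import unittest

def classify_network_error(err, resource_name):
    # Hypothetical helper: decide whether an exception raised by a
    # network test reflects a transient infrastructure problem (skip)
    # or a genuine bug (re-raise).
    if isinstance(err, socket.timeout):
        # Visible to callers only because create_connection() now
        # re-raises the original exception (issue #9792).
        raise unittest.SkipTest('timed out reaching %s' % resource_name)
    if isinstance(err, socket.gaierror):
        # Name resolution failed: the host is unreachable, not broken.
        raise unittest.SkipTest('DNS failure for %s' % resource_name)
    raise err
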
Antoine Pitrou committed on 2010-09-07 21:22:56 +00:00
parent 824cf253e5
commit 4d7979be72
6 changed files with 134 additions and 46 deletions

Lib/test/test_robotparser.py

@@ -235,23 +235,24 @@ class NetworkTestCase(unittest.TestCase):

     def testPasswordProtectedSite(self):
         support.requires('network')
-        # XXX it depends on an external resource which could be unavailable
-        url = 'http://mueblesmoraleda.com'
-        parser = urllib.robotparser.RobotFileParser()
-        parser.set_url(url)
-        try:
-            parser.read()
-        except URLError:
-            self.skipTest('%s is unavailable' % url)
-        self.assertEqual(parser.can_fetch("*", url+"/robots.txt"), False)
+        with support.transient_internet('mueblesmoraleda.com'):
+            url = 'http://mueblesmoraleda.com'
+            parser = urllib.robotparser.RobotFileParser()
+            parser.set_url(url)
+            try:
+                parser.read()
+            except URLError:
+                self.skipTest('%s is unavailable' % url)
+            self.assertEqual(parser.can_fetch("*", url+"/robots.txt"), False)

     def testPythonOrg(self):
         support.requires('network')
-        parser = urllib.robotparser.RobotFileParser(
-            "http://www.python.org/robots.txt")
-        parser.read()
-        self.assertTrue(parser.can_fetch("*",
-                        "http://www.python.org/robots.txt"))
+        with support.transient_internet('www.python.org'):
+            parser = urllib.robotparser.RobotFileParser(
+                "http://www.python.org/robots.txt")
+            parser.read()
+            self.assertTrue(
+                parser.can_fetch("*", "http://www.python.org/robots.txt"))

 def test_main():
     support.run_unittest(NetworkTestCase)