import time
-def _open_with_retry(func, host, *args, **kwargs):
-    # Connecting to remote hosts is flaky. Make it more robust
-    # by retrying the connection several times.
-    last_exc = None
-    for i in range(3):
-        try:
-            return func(host, *args, **kwargs)
-        except IOError as err:
-            last_exc = err
-            continue
-        except:
-            raise
-    raise last_exc
-
-
class URLTimeoutTest(unittest.TestCase):
    TIMEOUT = 10.0
        socket.setdefaulttimeout(None)
    def testURLread(self):
-        f = _open_with_retry(urllib.request.urlopen, "http://www.python.org/")
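+        # transient_internet() is a context manager from test.support: it
+        # converts transient network failures (DNS errors, timeouts,
+        # connection resets) into ResourceDenied, so flaky connectivity
+        # skips the test instead of failing it.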
+        with support.transient_internet("www.python.org"):
+            f = urllib.request.urlopen("http://www.python.org/")
        x = f.read()
class urlopenNetworkTests(unittest.TestCase):
"""
- def urlopen(self, *args):
- return _open_with_retry(urllib.request.urlopen, *args)
+ def urlopen(self, *args, **kwargs):
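+        # args[0] is the URL being fetched; transient_internet() only uses it
+        # as the resource name to report if the test has to be skipped.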
+        resource = args[0]
+        with support.transient_internet(resource):
+            return urllib.request.urlopen(*args, **kwargs)
    def test_basic(self):
        # Simple test expected to pass.
            # test can't pass on Windows.
            return
        # Make sure fd returned by fileno is valid.
-        open_url = self.urlopen("http://www.python.org/")
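+        # timeout=None keeps the socket blocking: transient_internet() installs
+        # a default socket timeout, and a socket with a timeout runs in internal
+        # non-blocking mode, which can break reading its fd through os.fdopen().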
+        open_url = self.urlopen("http://www.python.org/", timeout=None)
        fd = open_url.fileno()
        FILE = os.fdopen(fd, encoding='utf-8')
        try:
"""Tests urllib.request.urlretrieve using the network."""
def urlretrieve(self, *args):
- return _open_with_retry(urllib.request.urlretrieve, *args)
+ resource = args[0]
+ with support.transient_internet(resource):
+ return urllib.request.urlretrieve(*args)
def test_basic(self):
# Test basic functionality.