PageRenderTime 295ms CodeModel.GetById 121ms app.highlight 14ms RepoModel.GetById 157ms app.codeStats 0ms

/Lib/test/test_urllibnet.py

http://unladen-swallow.googlecode.com/
Python | 194 lines | 168 code | 12 blank | 14 comment | 8 complexity | 815baf1a83ab37d1d8b3d0bb6b75d31e MD5 | raw file
  1#!/usr/bin/env python
  2
  3import unittest
  4from test import test_support
  5
  6import socket
  7import urllib
  8import sys
  9import os
 10import mimetools
 11
 12
 13def _open_with_retry(func, host, *args, **kwargs):
 14    # Connecting to remote hosts is flaky.  Make it more robust
 15    # by retrying the connection several times.
 16    for i in range(3):
 17        try:
 18            return func(host, *args, **kwargs)
 19        except IOError, last_exc:
 20            continue
 21        except:
 22            raise
 23    raise last_exc
 24
 25
 26class URLTimeoutTest(unittest.TestCase):
 27
 28    TIMEOUT = 10.0
 29
 30    def setUp(self):
 31        socket.setdefaulttimeout(self.TIMEOUT)
 32
 33    def tearDown(self):
 34        socket.setdefaulttimeout(None)
 35
 36    def testURLread(self):
 37        f = _open_with_retry(urllib.urlopen, "http://www.python.org/")
 38        x = f.read()
 39
 40class urlopenNetworkTests(unittest.TestCase):
 41    """Tests urllib.urlopen using the network.
 42
 43    These tests are not exhaustive.  Assuming that testing using files does a
 44    good job overall of some of the basic interface features.  There are no
 45    tests exercising the optional 'data' and 'proxies' arguments.  No tests
 46    for transparent redirection have been written.
 47
 48    setUp is not used for always constructing a connection to
 49    http://www.python.org/ since there a few tests that don't use that address
 50    and making a connection is expensive enough to warrant minimizing unneeded
 51    connections.
 52
 53    """
 54
 55    def urlopen(self, *args):
 56        return _open_with_retry(urllib.urlopen, *args)
 57
 58    def test_basic(self):
 59        # Simple test expected to pass.
 60        open_url = self.urlopen("http://www.python.org/")
 61        for attr in ("read", "readline", "readlines", "fileno", "close",
 62                     "info", "geturl"):
 63            self.assert_(hasattr(open_url, attr), "object returned from "
 64                            "urlopen lacks the %s attribute" % attr)
 65        try:
 66            self.assert_(open_url.read(), "calling 'read' failed")
 67        finally:
 68            open_url.close()
 69
 70    def test_readlines(self):
 71        # Test both readline and readlines.
 72        open_url = self.urlopen("http://www.python.org/")
 73        try:
 74            self.assert_(isinstance(open_url.readline(), basestring),
 75                         "readline did not return a string")
 76            self.assert_(isinstance(open_url.readlines(), list),
 77                         "readlines did not return a list")
 78        finally:
 79            open_url.close()
 80
 81    def test_info(self):
 82        # Test 'info'.
 83        open_url = self.urlopen("http://www.python.org/")
 84        try:
 85            info_obj = open_url.info()
 86        finally:
 87            open_url.close()
 88            self.assert_(isinstance(info_obj, mimetools.Message),
 89                         "object returned by 'info' is not an instance of "
 90                         "mimetools.Message")
 91            self.assertEqual(info_obj.getsubtype(), "html")
 92
 93    def test_geturl(self):
 94        # Make sure same URL as opened is returned by geturl.
 95        URL = "http://www.python.org/"
 96        open_url = self.urlopen(URL)
 97        try:
 98            gotten_url = open_url.geturl()
 99        finally:
100            open_url.close()
101        self.assertEqual(gotten_url, URL)
102
103    def test_getcode(self):
104        # test getcode() with the fancy opener to get 404 error codes
105        URL = "http://www.python.org/XXXinvalidXXX"
106        open_url = urllib.FancyURLopener().open(URL)
107        try:
108            code = open_url.getcode()
109        finally:
110            open_url.close()
111        self.assertEqual(code, 404)
112
113    def test_fileno(self):
114        if (sys.platform in ('win32',) or
115                not hasattr(os, 'fdopen')):
116            # On Windows, socket handles are not file descriptors; this
117            # test can't pass on Windows.
118            return
119        # Make sure fd returned by fileno is valid.
120        open_url = self.urlopen("http://www.python.org/")
121        fd = open_url.fileno()
122        FILE = os.fdopen(fd)
123        try:
124            self.assert_(FILE.read(), "reading from file created using fd "
125                                      "returned by fileno failed")
126        finally:
127            FILE.close()
128
129    def test_bad_address(self):
130        # Make sure proper exception is raised when connecting to a bogus
131        # address.
132        self.assertRaises(IOError,
133                          # SF patch 809915:  In Sep 2003, VeriSign started
134                          # highjacking invalid .com and .net addresses to
135                          # boost traffic to their own site.  This test
136                          # started failing then.  One hopes the .invalid
137                          # domain will be spared to serve its defined
138                          # purpose.
139                          # urllib.urlopen, "http://www.sadflkjsasadf.com/")
140                          urllib.urlopen, "http://sadflkjsasf.i.nvali.d/")
141
class urlretrieveNetworkTests(unittest.TestCase):
    """Tests urllib.urlretrieve using the network."""

    def urlretrieve(self, *args):
        # Funnel every retrieve through the retry helper: remote
        # connections are flaky.
        return _open_with_retry(urllib.urlretrieve, *args)

    def test_basic(self):
        # Test basic functionality: a (path, headers) pair is returned and
        # the path names a readable, non-empty file.
        file_location, info = self.urlretrieve("http://www.python.org/")
        self.assert_(os.path.exists(file_location), "file location returned by"
                        " urlretrieve is not a valid path")
        # open() instead of the deprecated file() builtin.
        FILE = open(file_location)
        try:
            self.assert_(FILE.read(), "reading from the file location returned"
                         " by urlretrieve failed")
        finally:
            FILE.close()
            os.unlink(file_location)

    def test_specified_path(self):
        # Make sure that specifying the location of the file to write to works.
        file_location, info = self.urlretrieve("http://www.python.org/",
                                               test_support.TESTFN)
        self.assertEqual(file_location, test_support.TESTFN)
        self.assert_(os.path.exists(file_location))
        FILE = open(file_location)
        try:
            self.assert_(FILE.read(), "reading from temporary file failed")
        finally:
            FILE.close()
            os.unlink(file_location)

    def test_header(self):
        # Make sure header returned as 2nd value from urlretrieve is good.
        file_location, header = self.urlretrieve("http://www.python.org/")
        try:
            self.assert_(isinstance(header, mimetools.Message),
                         "header is not an instance of mimetools.Message")
        finally:
            # try/finally guarantees the temp file is removed even when the
            # assertion fails (the original unlinked before asserting).
            os.unlink(file_location)
181
182
def test_main():
    """Run all network tests, silencing the py3k urllib deprecation warning.

    Requires the 'network' resource; skips (via test_support) when network
    tests are not enabled.
    """
    test_support.requires('network')
    from warnings import filterwarnings, catch_warnings
    with catch_warnings():
        # These tests exercise urllib.urlopen on purpose, so suppress only
        # its "deprecated for 3.0" warning.  Raw string: the pattern is a
        # regex and '\.' must reach the re module unmangled.
        filterwarnings('ignore', r'.*urllib\.urlopen.*Python 3.0',
                        DeprecationWarning)
        test_support.run_unittest(URLTimeoutTest,
                                  urlopenNetworkTests,
                                  urlretrieveNetworkTests)

if __name__ == "__main__":
    test_main()