granicus.if.org Git - python/commitdiff
Issue #12180: Fixed a few remaining errors in test_packaging when no threading.
authorTarek Ziade <tarek@ziade.org>
Wed, 25 May 2011 21:46:09 +0000 (23:46 +0200)
committerTarek Ziade <tarek@ziade.org>
Wed, 25 May 2011 21:46:09 +0000 (23:46 +0200)
Lib/packaging/tests/support.py
Lib/packaging/tests/test_install.py
Lib/packaging/tests/test_pypi_dist.py
Lib/packaging/tests/test_pypi_simple.py
Misc/NEWS

index 6d60b9e9a79085f76bb678196d5d1fe1b1d20e44..66b5583b838be05fcee8993effe296125ab165dd 100644 (file)
@@ -253,6 +253,15 @@ def create_distribution(configfiles=()):
     return d
 
 
+def fake_dec(*args, **kw):
+    """Fake decorator"""
+    def _wrap(func):
+        def __wrap(*args, **kw):
+            return func(*args, **kw)
+        return __wrap
+    return _wrap
+
+
 try:
     from test.support import skip_unless_symlink
 except ImportError:
index 01c3dcf6b43f7af471438041af14e89f1e18e687..c0924bfb8fd34f0e5cd37f8797f6cefac3544220 100644 (file)
@@ -6,13 +6,14 @@ from packaging import install
 from packaging.pypi.xmlrpc import Client
 from packaging.metadata import Metadata
 
-from packaging.tests.support import LoggingCatcher, TempdirManager, unittest
+from packaging.tests.support import (LoggingCatcher, TempdirManager, unittest,
+                                     fake_dec)
 try:
     import threading
     from packaging.tests.pypi_server import use_xmlrpc_server
 except ImportError:
     threading = None
-    use_xmlrpc_server = None
+    use_xmlrpc_server = fake_dec
 
 
 class InstalledDist:
index b7f4e985f1ef4dc65931cdaeb2a458e15aba580c..0c88c9b6b96d4150a78a709e6643a76f133444b4 100644 (file)
@@ -7,12 +7,13 @@ from packaging.pypi.dist import (ReleaseInfo, ReleasesList, DistInfo,
 from packaging.pypi.errors import HashDoesNotMatch, UnsupportedHashName
 
 from packaging.tests import unittest
-from packaging.tests.support import TempdirManager, requires_zlib
+from packaging.tests.support import TempdirManager, requires_zlib, fake_dec
 try:
     import threading
     from packaging.tests.pypi_server import use_pypi_server
 except ImportError:
-    threading = use_pypi_server = None
+    threading = None
+    use_pypi_server = fake_dec
 
 
 def Dist(*args, **kwargs):
index d50e3f4ecc67ccebf95084764e1c11286e28ccdd..bd50d01b39913704872851646da3bf882522b39c 100644 (file)
@@ -10,9 +10,19 @@ import urllib.request
 from packaging.pypi.simple import Crawler
 
 from packaging.tests import unittest
-from packaging.tests.support import TempdirManager, LoggingCatcher
-from packaging.tests.pypi_server import (use_pypi_server, PyPIServer,
-                                         PYPI_DEFAULT_STATIC_PATH)
+from packaging.tests.support import (TempdirManager, LoggingCatcher,
+                                     fake_dec)
+
+try:
+    import _thread
+    from packaging.tests.pypi_server import (use_pypi_server, PyPIServer,
+                                             PYPI_DEFAULT_STATIC_PATH)
+except ImportError:
+    _thread = None
+    use_pypi_server = fake_dec
+    PYPI_DEFAULT_STATIC_PATH = os.path.join(
+        os.path.dirname(os.path.abspath(__file__)), 'pypiserver')
+
 
 
 class SimpleCrawlerTestCase(TempdirManager,
@@ -28,6 +38,7 @@ class SimpleCrawlerTestCase(TempdirManager,
         return Crawler(server.full_address + base_url, *args,
                        **kwargs)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server()
     def test_bad_urls(self, server):
         crawler = Crawler()
@@ -84,6 +95,7 @@ class SimpleCrawlerTestCase(TempdirManager,
                 'http://www.famfamfam.com/">')
         crawler._process_url(url, page)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server("test_found_links")
     def test_found_links(self, server):
         # Browse the index, asking for a specified release version
@@ -139,6 +151,7 @@ class SimpleCrawlerTestCase(TempdirManager,
         self.assertTrue(
             crawler._is_browsable("http://pypi.example.org/a/path"))
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server("with_externals")
     def test_follow_externals(self, server):
         # Include external pages
@@ -149,6 +162,7 @@ class SimpleCrawlerTestCase(TempdirManager,
         self.assertIn(server.full_address + "/external/external.html",
             crawler._processed_urls)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server("with_real_externals")
     def test_restrict_hosts(self, server):
         # Only use a list of allowed hosts is possible
@@ -159,6 +173,7 @@ class SimpleCrawlerTestCase(TempdirManager,
         self.assertNotIn(server.full_address + "/external/external.html",
             crawler._processed_urls)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server(static_filesystem_paths=["with_externals"],
         static_uri_paths=["simple", "external"])
     def test_links_priority(self, server):
@@ -192,6 +207,7 @@ class SimpleCrawlerTestCase(TempdirManager,
                          releases[0].dists['sdist'].url['hashval'])
         self.assertEqual('md5', releases[0].dists['sdist'].url['hashname'])
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server(static_filesystem_paths=["with_norel_links"],
         static_uri_paths=["simple", "external"])
     def test_not_scan_all_links(self, server):
@@ -217,6 +233,7 @@ class SimpleCrawlerTestCase(TempdirManager,
         self.assertIn("%s/foobar-2.0.tar.gz" % server.full_address,
             crawler._processed_urls)  # linked from external homepage (rel)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     def test_uses_mirrors(self):
         # When the main repository seems down, try using the given mirrors"""
         server = PyPIServer("foo_bar_baz")
@@ -314,6 +331,7 @@ class SimpleCrawlerTestCase(TempdirManager,
         self.assertIn('http://example.org/some/simpleurl', found_links)
         self.assertIn('http://example.org/some/download', found_links)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server("project_list")
     def test_search_projects(self, server):
         # we can search the index for some projects, on their names
index 60209bf7a95e8b5981fa5b689e929fa24f30b656..b1d26dd22233bce2f6d06c0dc70e23dc6d215d79 100644 (file)
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -161,6 +161,9 @@ Core and Builtins
 Library
 -------
 
+- Issue #12180: Fixed a few remaining errors in test_packaging when no 
+  threading.
+
 - Issue #12175: RawIOBase.readall() now returns None if read() returns None.
 
 - Issue #12175: FileIO.readall() now raises a ValueError instead of an IOError