comply with the evilJavaNamingScheme for attribute names
author      Benjamin Peterson <benjamin@python.org>
            Mon, 23 Mar 2009 22:29:45 +0000 (22:29 +0000)
committer   Benjamin Peterson <benjamin@python.org>
            Mon, 23 Mar 2009 22:29:45 +0000 (22:29 +0000)
It seems my love of PEP 8 overrode the need for consistency

Doc/library/unittest.rst
Lib/test/test_unittest.py
Lib/unittest.py

Doc/library/unittest.rst
index 22b2870a55cfd2533fe900f6c67c8f0680492e21..69227cf8cbae786a02065894cef35292cd744be2 100644 (file)
@@ -950,7 +950,7 @@ tools which support interactive reporting while tests are being run.
    :func:`expectedFailure` decorator.
 
    The default implementation appends a tuple ``(test, formatted_err)`` to the
-   instance's ``expected_failures`` attribute, where *formatted_err* is a
+   instance's ``expectedFailures`` attribute, where *formatted_err* is a
    formatted traceback derived from *err*.
 
 
@@ -960,7 +960,7 @@ tools which support interactive reporting while tests are being run.
    decorator, but succeeded.
 
    The default implementation appends the test to the instance's
-   ``unexpected_successes`` attribute.
+   ``unexpectedSuccesses`` attribute.
 
 
 .. _testloader-objects:
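
Not part of the commit, but for context: a minimal, illustrative sketch of how the
renamed attributes surface to callers of the hooks documented above (the test class
and method names are made up):

    import unittest

    class Example(unittest.TestCase):
        @unittest.expectedFailure
        def test_known_bug(self):
            # Fails as anticipated, so addExpectedFailure() records it.
            self.assertEqual(1, 2)

        @unittest.expectedFailure
        def test_surprise_pass(self):
            # Passes despite the decorator, so addUnexpectedSuccess() records it.
            self.assertEqual(1, 1)

    result = unittest.TestResult()
    unittest.TestLoader().loadTestsFromTestCase(Example).run(result)
    print(len(result.expectedFailures))     # 1 entry: (test, formatted traceback)
    print(len(result.unexpectedSuccesses))  # 1 entry: the test case itself
    print(result.wasSuccessful())           # True; neither outcome is a failure
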
Lib/test/test_unittest.py
index 6a32dbf8d155c8d63263e5c96358870285d8e999..cd8f9670da77e75d0cf9acfb34c7b34659a29f19 100644 (file)
@@ -2362,7 +2362,7 @@ class Test_TestSkipping(TestCase):
         test.run(result)
         self.assertEqual(events,
                          ['startTest', 'addExpectedFailure', 'stopTest'])
-        self.assertEqual(result.expected_failures[0][0], test)
+        self.assertEqual(result.expectedFailures[0][0], test)
         self.assertTrue(result.wasSuccessful())
 
     def test_unexpected_success(self):
@@ -2377,7 +2377,7 @@ class Test_TestSkipping(TestCase):
         self.assertEqual(events,
                          ['startTest', 'addUnexpectedSuccess', 'stopTest'])
         self.assertFalse(result.failures)
-        self.assertEqual(result.unexpected_successes, [test])
+        self.assertEqual(result.unexpectedSuccesses, [test])
         self.assertTrue(result.wasSuccessful())
 
 
Lib/unittest.py
index 8263887a6a238c2ec16c567d25573143f1a8de16..74585eee005f09649087a7cfea8d09778a5f8d11 100644 (file)
@@ -176,8 +176,8 @@ class TestResult(object):
         self.errors = []
         self.testsRun = 0
         self.skipped = []
-        self.expected_failures = []
-        self.unexpected_successes = []
+        self.expectedFailures = []
+        self.unexpectedSuccesses = []
         self.shouldStop = False
 
     def startTest(self, test):
@@ -209,12 +209,12 @@ class TestResult(object):
 
     def addExpectedFailure(self, test, err):
         """Called when an expected failure/error occured."""
-        self.expected_failures.append(
+        self.expectedFailures.append(
             (test, self._exc_info_to_string(err, test)))
 
     def addUnexpectedSuccess(self, test):
         """Called when a test was expected to fail, but succeed."""
-        self.unexpected_successes.append(test)
+        self.unexpectedSuccesses.append(test)
 
     def wasSuccessful(self):
         "Tells whether or not this result was a success"
@@ -923,10 +923,10 @@ class TextTestRunner(object):
         self.stream.writeln("Ran %d test%s in %.3fs" %
                             (run, run != 1 and "s" or "", timeTaken))
         self.stream.writeln()
-        results = map(len, (result.expected_failures,
-                            result.unexpected_successes,
+        results = map(len, (result.expectedFailures,
+                            result.unexpectedSuccesses,
                             result.skipped))
-        expected_fails, unexpected_successes, skipped = results
+        expectedFails, unexpectedSuccesses, skipped = results
         infos = []
         if not result.wasSuccessful():
             self.stream.write("FAILED")
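
These lists are public attributes of TestResult, so third-party result classes that
extend the hooks or read the counters need the new camelCase names as well. A minimal
sketch, assuming a hypothetical ReportingResult subclass:

    import unittest

    class ReportingResult(unittest.TestResult):
        """Illustrative subclass; the base hooks now feed the camelCase lists."""

        def addExpectedFailure(self, test, err):
            # The base implementation appends (test, formatted_err) to
            # self.expectedFailures (formerly self.expected_failures).
            super(ReportingResult, self).addExpectedFailure(test, err)
            print("expected failure: %s" % test)

        def addUnexpectedSuccess(self, test):
            # The base implementation appends the test to
            # self.unexpectedSuccesses (formerly self.unexpected_successes).
            super(ReportingResult, self).addUnexpectedSuccess(test)
            print("unexpected success: %s" % test)

Any code that previously read result.expected_failures or result.unexpected_successes
directly needs the same rename.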