# actually override the attribute
setattr(object_to_patch, attr_name, new_value)
+
+
+def run_in_subinterp(code):
+ """
+ Run code in a subinterpreter. Raise unittest.SkipTest if the tracemalloc
+ module is enabled.
+ """
+ # Issue #10915, #15751: PyGILState_*() functions don't work with
+ # sub-interpreters, the tracemalloc module uses these functions internally
+ try:
+ import tracemalloc
+ except ImportError:
+ pass
+ else:
+ if tracemalloc.is_tracing():
+ raise unittest.SkipTest("run_in_subinterp() cannot be used "
+ "if tracemalloc module is tracing "
+ "memory allocations")
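+    # Import _testcapi lazily so that merely importing test.support does not
+    # require this CPython-only module.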
++ import _testcapi
+ return _testcapi.run_in_subinterp(code)
- import _testcapi
import codecs
+import contextlib
import io
import locale
import sys
# Initial tests are copied as is from "test_poll.py"
-import os, select, random, unittest, sys
+import os
+import random
+import select
+import sys
+import unittest
- from test.support import TESTFN, run_unittest
- from _testcapi import USHRT_MAX
+ from test.support import TESTFN, run_unittest, cpython_only
try:
select.devpoll
self.assertRaises(OverflowError, pollster.poll, 1 << 63)
self.assertRaises(OverflowError, pollster.poll, 1 << 64)
-
+ def test_close(self):
+ open_file = open(__file__, "rb")
+ self.addCleanup(open_file.close)
+ fd = open_file.fileno()
+ devpoll = select.devpoll()
+
+ # test fileno() method and closed attribute
+ self.assertIsInstance(devpoll.fileno(), int)
+ self.assertFalse(devpoll.closed)
+
+ # test close()
+ devpoll.close()
+ self.assertTrue(devpoll.closed)
+ self.assertRaises(ValueError, devpoll.fileno)
+
+ # close() can be called more than once
+ devpoll.close()
+
+ # operations must fail with ValueError("I/O operation on closed ...")
+ self.assertRaises(ValueError, devpoll.modify, fd, select.POLLIN)
+ self.assertRaises(ValueError, devpoll.poll)
+        self.assertRaises(ValueError, devpoll.register, fd, select.POLLIN)
+ self.assertRaises(ValueError, devpoll.unregister, fd)
+
+ def test_fd_non_inheritable(self):
+ devpoll = select.devpoll()
+ self.addCleanup(devpoll.close)
+ self.assertEqual(os.get_inheritable(devpoll.fileno()), False)
+
def test_events_mask_overflow(self):
pollster = select.devpoll()
w, r = os.pipe()
finally:
os.close(fd)
+ def test_flock(self):
+ # Solaris needs readable file for shared lock
+ self.f = open(TESTFN, 'wb+')
+ fileno = self.f.fileno()
+ fcntl.flock(fileno, fcntl.LOCK_SH)
+ fcntl.flock(fileno, fcntl.LOCK_UN)
+ fcntl.flock(self.f, fcntl.LOCK_SH | fcntl.LOCK_NB)
+ fcntl.flock(self.f, fcntl.LOCK_UN)
+ fcntl.flock(fileno, fcntl.LOCK_EX)
+ fcntl.flock(fileno, fcntl.LOCK_UN)
+
+ self.assertRaises(ValueError, fcntl.flock, -1, fcntl.LOCK_SH)
+ self.assertRaises(TypeError, fcntl.flock, 'spam', fcntl.LOCK_SH)
++
++ @cpython_only
++ def test_flock_overflow(self):
++ import _testcapi
+ self.assertRaises(OverflowError, fcntl.flock, _testcapi.INT_MAX+1,
+ fcntl.LOCK_SH)
+
def test_main():
run_unittest(TestFcntl)
self.assertRaises(OSError, _FileIO, make_bad_fd())
if sys.platform == 'win32':
import msvcrt
- self.assertRaises(IOError, msvcrt.get_osfhandle, make_bad_fd())
+ self.assertRaises(OSError, msvcrt.get_osfhandle, make_bad_fd())
+
+ @cpython_only
+ def testInvalidFd_overflow(self):
# Issue 15989
+ import _testcapi
self.assertRaises(TypeError, _FileIO, _testcapi.INT_MAX + 1)
self.assertRaises(TypeError, _FileIO, _testcapi.INT_MIN - 1)
--- /dev/null
- import _testcapi
+"""
+Tests for object finalization semantics, as outlined in PEP 442.
+"""
+
+import contextlib
+import gc
+import unittest
+import weakref
+
- @_testcapi.with_tp_del
++try:
++ from _testcapi import with_tp_del
++except ImportError:
++ def with_tp_del(cls):
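++        # _testcapi is unavailable (e.g. on a non-CPython implementation):
++        # provide a placeholder whose instantiation fails, so this module
++        # still imports and the CPython-only tests are simply skipped.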
++ class C(object):
++ def __new__(cls, *args, **kwargs):
++ raise TypeError('requires _testcapi.with_tp_del')
++ return C
++
+from test import support
+
+
+class NonGCSimpleBase:
+ """
+ The base class for all the objects under test, equipped with various
+ testing features.
+ """
+
+ survivors = []
+ del_calls = []
+ tp_del_calls = []
+ errors = []
+
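+    # Set to True while the test harness itself is cleaning up, so that
+    # finalizers triggered by the cleanup are not recorded or checked.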
+ _cleaning = False
+
+ __slots__ = ()
+
+ @classmethod
+ def _cleanup(cls):
+ cls.survivors.clear()
+ cls.errors.clear()
+ gc.garbage.clear()
+ gc.collect()
+ cls.del_calls.clear()
+ cls.tp_del_calls.clear()
+
+ @classmethod
+ @contextlib.contextmanager
+ def test(cls):
+ """
+ A context manager to use around all finalization tests.
+ """
+ with support.disable_gc():
+ cls.del_calls.clear()
+ cls.tp_del_calls.clear()
+ NonGCSimpleBase._cleaning = False
+ try:
+ yield
+ if cls.errors:
+ raise cls.errors[0]
+ finally:
+ NonGCSimpleBase._cleaning = True
+ cls._cleanup()
+
+ def check_sanity(self):
+ """
+ Check the object is sane (non-broken).
+ """
+
+ def __del__(self):
+ """
+ PEP 442 finalizer. Record that this was called, check the
+ object is in a sane state, and invoke a side effect.
+ """
+ try:
+ if not self._cleaning:
+ self.del_calls.append(id(self))
+ self.check_sanity()
+ self.side_effect()
+ except Exception as e:
+ self.errors.append(e)
+
+ def side_effect(self):
+ """
+ A side effect called on destruction.
+ """
+
+
+class SimpleBase(NonGCSimpleBase):
+
+ def __init__(self):
+ self.id_ = id(self)
+
+ def check_sanity(self):
+ assert self.id_ == id(self)
+
+
+class NonGC(NonGCSimpleBase):
+ __slots__ = ()
+
+class NonGCResurrector(NonGCSimpleBase):
+ __slots__ = ()
+
+ def side_effect(self):
+ """
+ Resurrect self by storing self in a class-wide list.
+ """
+ self.survivors.append(self)
+
+class Simple(SimpleBase):
+ pass
+
+class SimpleResurrector(NonGCResurrector, SimpleBase):
+ pass
+
+
+class TestBase:
+
+ def setUp(self):
+ self.old_garbage = gc.garbage[:]
+ gc.garbage[:] = []
+
+ def tearDown(self):
+ # None of the tests here should put anything in gc.garbage
+ try:
+ self.assertEqual(gc.garbage, [])
+ finally:
+ del self.old_garbage
+ gc.collect()
+
+ def assert_del_calls(self, ids):
+ self.assertEqual(sorted(SimpleBase.del_calls), sorted(ids))
+
+ def assert_tp_del_calls(self, ids):
+ self.assertEqual(sorted(SimpleBase.tp_del_calls), sorted(ids))
+
+ def assert_survivors(self, ids):
+ self.assertEqual(sorted(id(x) for x in SimpleBase.survivors), sorted(ids))
+
+ def assert_garbage(self, ids):
+ self.assertEqual(sorted(id(x) for x in gc.garbage), sorted(ids))
+
+ def clear_survivors(self):
+ SimpleBase.survivors.clear()
+
+
+class SimpleFinalizationTest(TestBase, unittest.TestCase):
+ """
+ Test finalization without refcycles.
+ """
+
+ def test_simple(self):
+ with SimpleBase.test():
+ s = Simple()
+ ids = [id(s)]
+ wr = weakref.ref(s)
+ del s
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_survivors([])
+ self.assertIs(wr(), None)
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_survivors([])
+
+ def test_simple_resurrect(self):
+ with SimpleBase.test():
+ s = SimpleResurrector()
+ ids = [id(s)]
+ wr = weakref.ref(s)
+ del s
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_survivors(ids)
+ self.assertIsNot(wr(), None)
+ self.clear_survivors()
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_survivors([])
+ self.assertIs(wr(), None)
+
+ def test_non_gc(self):
+ with SimpleBase.test():
+ s = NonGC()
+ self.assertFalse(gc.is_tracked(s))
+ ids = [id(s)]
+ del s
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_survivors([])
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_survivors([])
+
+ def test_non_gc_resurrect(self):
+ with SimpleBase.test():
+ s = NonGCResurrector()
+ self.assertFalse(gc.is_tracked(s))
+ ids = [id(s)]
+ del s
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_survivors(ids)
+ self.clear_survivors()
+ gc.collect()
+ self.assert_del_calls(ids * 2)
+ self.assert_survivors(ids)
+
+
+class SelfCycleBase:
+
+ def __init__(self):
+ super().__init__()
+ self.ref = self
+
+ def check_sanity(self):
+ super().check_sanity()
+ assert self.ref is self
+
+class SimpleSelfCycle(SelfCycleBase, Simple):
+ pass
+
+class SelfCycleResurrector(SelfCycleBase, SimpleResurrector):
+ pass
+
+class SuicidalSelfCycle(SelfCycleBase, Simple):
+
+ def side_effect(self):
+ """
+ Explicitly break the reference cycle.
+ """
+ self.ref = None
+
+
+class SelfCycleFinalizationTest(TestBase, unittest.TestCase):
+ """
+ Test finalization of an object having a single cyclic reference to
+ itself.
+ """
+
+ def test_simple(self):
+ with SimpleBase.test():
+ s = SimpleSelfCycle()
+ ids = [id(s)]
+ wr = weakref.ref(s)
+ del s
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_survivors([])
+ self.assertIs(wr(), None)
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_survivors([])
+
+ def test_simple_resurrect(self):
+ # Test that __del__ can resurrect the object being finalized.
+ with SimpleBase.test():
+ s = SelfCycleResurrector()
+ ids = [id(s)]
+ wr = weakref.ref(s)
+ del s
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_survivors(ids)
+ # XXX is this desirable?
+ self.assertIs(wr(), None)
+ # When trying to destroy the object a second time, __del__
+ # isn't called anymore (and the object isn't resurrected).
+ self.clear_survivors()
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_survivors([])
+ self.assertIs(wr(), None)
+
+ def test_simple_suicide(self):
+ # Test the GC is able to deal with an object that kills its last
+ # reference during __del__.
+ with SimpleBase.test():
+ s = SuicidalSelfCycle()
+ ids = [id(s)]
+ wr = weakref.ref(s)
+ del s
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_survivors([])
+ self.assertIs(wr(), None)
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_survivors([])
+ self.assertIs(wr(), None)
+
+
+class ChainedBase:
+
+ def chain(self, left):
+ self.suicided = False
+ self.left = left
+ left.right = self
+
+ def check_sanity(self):
+ super().check_sanity()
+ if self.suicided:
+ assert self.left is None
+ assert self.right is None
+ else:
+ left = self.left
+ if left.suicided:
+ assert left.right is None
+ else:
+ assert left.right is self
+ right = self.right
+ if right.suicided:
+ assert right.left is None
+ else:
+ assert right.left is self
+
+class SimpleChained(ChainedBase, Simple):
+ pass
+
+class ChainedResurrector(ChainedBase, SimpleResurrector):
+ pass
+
+class SuicidalChained(ChainedBase, Simple):
+
+ def side_effect(self):
+ """
+ Explicitly break the reference cycle.
+ """
+ self.suicided = True
+ self.left = None
+ self.right = None
+
+
+class CycleChainFinalizationTest(TestBase, unittest.TestCase):
+ """
+ Test finalization of a cyclic chain. These tests are similar in
+ spirit to the self-cycle tests above, but the collectable object
+ graph isn't trivial anymore.
+ """
+
+ def build_chain(self, classes):
+ nodes = [cls() for cls in classes]
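+        # Link each node to its predecessor; nodes[0] links to nodes[-1],
+        # closing the chain into a reference cycle.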
+ for i in range(len(nodes)):
+ nodes[i].chain(nodes[i-1])
+ return nodes
+
+ def check_non_resurrecting_chain(self, classes):
+ N = len(classes)
+ with SimpleBase.test():
+ nodes = self.build_chain(classes)
+ ids = [id(s) for s in nodes]
+ wrs = [weakref.ref(s) for s in nodes]
+ del nodes
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_survivors([])
+ self.assertEqual([wr() for wr in wrs], [None] * N)
+ gc.collect()
+ self.assert_del_calls(ids)
+
+ def check_resurrecting_chain(self, classes):
+ N = len(classes)
+ with SimpleBase.test():
+ nodes = self.build_chain(classes)
+ N = len(nodes)
+ ids = [id(s) for s in nodes]
+ survivor_ids = [id(s) for s in nodes if isinstance(s, SimpleResurrector)]
+ wrs = [weakref.ref(s) for s in nodes]
+ del nodes
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_survivors(survivor_ids)
+ # XXX desirable?
+ self.assertEqual([wr() for wr in wrs], [None] * N)
+ self.clear_survivors()
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_survivors([])
+
+ def test_homogenous(self):
+ self.check_non_resurrecting_chain([SimpleChained] * 3)
+
+ def test_homogenous_resurrect(self):
+ self.check_resurrecting_chain([ChainedResurrector] * 3)
+
+ def test_homogenous_suicidal(self):
+ self.check_non_resurrecting_chain([SuicidalChained] * 3)
+
+ def test_heterogenous_suicidal_one(self):
+ self.check_non_resurrecting_chain([SuicidalChained, SimpleChained] * 2)
+
+ def test_heterogenous_suicidal_two(self):
+ self.check_non_resurrecting_chain(
+ [SuicidalChained] * 2 + [SimpleChained] * 2)
+
+ def test_heterogenous_resurrect_one(self):
+ self.check_resurrecting_chain([ChainedResurrector, SimpleChained] * 2)
+
+ def test_heterogenous_resurrect_two(self):
+ self.check_resurrecting_chain(
+ [ChainedResurrector, SimpleChained, SuicidalChained] * 2)
+
+ def test_heterogenous_resurrect_three(self):
+ self.check_resurrecting_chain(
+ [ChainedResurrector] * 2 + [SimpleChained] * 2 + [SuicidalChained] * 2)
+
+
+# NOTE: the tp_del slot isn't automatically inherited, so we have to call
+# with_tp_del() for each instantiated class.
+
+class LegacyBase(SimpleBase):
+
+ def __del__(self):
+ try:
+ # Do not invoke side_effect here, since we are now exercising
+ # the tp_del slot.
+ if not self._cleaning:
+ self.del_calls.append(id(self))
+ self.check_sanity()
+ except Exception as e:
+ self.errors.append(e)
+
+ def __tp_del__(self):
+ """
+ Legacy (pre-PEP 442) finalizer, mapped to a tp_del slot.
+ """
+ try:
+ if not self._cleaning:
+ self.tp_del_calls.append(id(self))
+ self.check_sanity()
+ self.side_effect()
+ except Exception as e:
+ self.errors.append(e)
+
- @_testcapi.with_tp_del
++@with_tp_del
+class Legacy(LegacyBase):
+ pass
+
- @_testcapi.with_tp_del
++@with_tp_del
+class LegacyResurrector(LegacyBase):
+
+ def side_effect(self):
+ """
+ Resurrect self by storing self in a class-wide list.
+ """
+ self.survivors.append(self)
+
++@with_tp_del
+class LegacySelfCycle(SelfCycleBase, LegacyBase):
+ pass
+
+
++@support.cpython_only
+class LegacyFinalizationTest(TestBase, unittest.TestCase):
+ """
+ Test finalization of objects with a tp_del.
+ """
+
+ def tearDown(self):
+ # These tests need to clean up a bit more, since they create
+ # uncollectable objects.
+ gc.garbage.clear()
+ gc.collect()
+ super().tearDown()
+
+ def test_legacy(self):
+ with SimpleBase.test():
+ s = Legacy()
+ ids = [id(s)]
+ wr = weakref.ref(s)
+ del s
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_tp_del_calls(ids)
+ self.assert_survivors([])
+ self.assertIs(wr(), None)
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_tp_del_calls(ids)
+
+ def test_legacy_resurrect(self):
+ with SimpleBase.test():
+ s = LegacyResurrector()
+ ids = [id(s)]
+ wr = weakref.ref(s)
+ del s
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_tp_del_calls(ids)
+ self.assert_survivors(ids)
+ # weakrefs are cleared before tp_del is called.
+ self.assertIs(wr(), None)
+ self.clear_survivors()
+ gc.collect()
+ self.assert_del_calls(ids)
+ self.assert_tp_del_calls(ids * 2)
+ self.assert_survivors(ids)
+ self.assertIs(wr(), None)
+
+ def test_legacy_self_cycle(self):
+ # Self-cycles with legacy finalizers end up in gc.garbage.
+ with SimpleBase.test():
+ s = LegacySelfCycle()
+ ids = [id(s)]
+ wr = weakref.ref(s)
+ del s
+ gc.collect()
+ self.assert_del_calls([])
+ self.assert_tp_del_calls([])
+ self.assert_survivors([])
+ self.assert_garbage(ids)
+ self.assertIsNot(wr(), None)
+ # Break the cycle to allow collection
+ gc.garbage[0].ref = None
+ self.assert_garbage([])
+ self.assertIs(wr(), None)
+
+
+def test_main():
+ support.run_unittest(__name__)
+
+if __name__ == "__main__":
+ test_main()
finally:
locale.setlocale(locale.LC_ALL, oldloc)
+ @support.cpython_only
+ def test_optimisations(self):
+ text = "abcde" # 5 characters
+
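+        # Each of these formatting operations should return the original
+        # string object itself (identity, not mere equality), exercising
+        # CPython's no-op formatting fast paths.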
+ self.assertIs("%s" % text, text)
+ self.assertIs("%.5s" % text, text)
+ self.assertIs("%.10s" % text, text)
+ self.assertIs("%1s" % text, text)
+ self.assertIs("%5s" % text, text)
+ self.assertIs("{0}".format(text), text)
+ self.assertIs("{0:s}".format(text), text)
+ self.assertIs("{0:.5s}".format(text), text)
+ self.assertIs("{0:.10s}".format(text), text)
+ self.assertIs("{0:1s}".format(text), text)
+ self.assertIs("{0:5s}".format(text), text)
-def test_main():
- support.run_unittest(FormatTest)
+ self.assertIs(text % (), text)
+ self.assertIs(text.format(), text)
- @support.cpython_only
def test_precision(self):
- from _testcapi import INT_MAX
-
f = 1.2
self.assertEqual(format(f, ".0f"), "1")
self.assertEqual(format(f, ".3f"), "1.200")
with self.assertRaises(ValueError) as cm:
- format(f, ".%sf" % (INT_MAX + 1))
+ format(f, ".%sf" % (sys.maxsize + 1))
- self.assertEqual(str(cm.exception), "precision too big")
c = complex(f)
self.assertEqual(format(c, ".0f"), "1+0j")
self.assertEqual(format(c, ".3f"), "1.200+0.000j")
- self.assertEqual(str(cm.exception), "precision too big")
+ with self.assertRaises(ValueError) as cm:
+ format(c, ".%sf" % (sys.maxsize + 1))
+
+ @support.cpython_only
+ def test_precision_c_limits(self):
+ from _testcapi import INT_MAX
+
+ f = 1.2
+ with self.assertRaises(ValueError) as cm:
+ format(f, ".%sf" % (INT_MAX + 1))
+
+ c = complex(f)
with self.assertRaises(ValueError) as cm:
format(c, ".%sf" % (INT_MAX + 1))
- import _testcapi
import unittest
from test.support import (verbose, refcount_test, run_unittest,
-- strip_python_stderr)
++ strip_python_stderr, cpython_only)
+from test.script_helper import assert_python_ok, make_script, temp_dir
+
import sys
import time
import gc
except ImportError:
threading = None
++try:
++ from _testcapi import with_tp_del
++except ImportError:
++ def with_tp_del(cls):
++ class C(object):
++ def __new__(cls, *args, **kwargs):
++ raise TypeError('requires _testcapi.with_tp_del')
++ return C
++
### Support code
###############################################################################
# gc collects it.
self.wr = weakref.ref(C1055820(666), it_happened)
- @_testcapi.with_tp_del
++@with_tp_del
class Uncollectable(object):
"""Create a reference cycle with multiple __del__ methods.
del a
self.assertNotEqual(gc.collect(), 0)
- def test_finalizer(self):
++ @cpython_only
+ def test_legacy_finalizer(self):
# A() is uncollectable if it is part of a cycle, make sure it shows up
# in gc.garbage.
- @_testcapi.with_tp_del
++ @with_tp_del
class A:
- def __del__(self): pass
+ def __tp_del__(self): pass
class B:
pass
a = A()
self.fail("didn't find obj in garbage (finalizer)")
gc.garbage.remove(obj)
- def test_finalizer_newclass(self):
++ @cpython_only
+ def test_legacy_finalizer_newclass(self):
# A() is uncollectable if it is part of a cycle, make sure it shows up
# in gc.garbage.
- @_testcapi.with_tp_del
++ @with_tp_del
class A(object):
- def __del__(self): pass
+ def __tp_del__(self): pass
class B(object):
pass
a = A()
# would be damaged, with an empty __dict__.
self.assertEqual(x, None)
++ @cpython_only
def test_garbage_at_shutdown(self):
import subprocess
code = """if 1:
info = v[2]
self.assertEqual(info["generation"], 2)
++ @cpython_only
def test_collect_garbage(self):
self.preclean()
# Each of these cause four objects to be garbage: Two
import unittest
from test import support
+ # Skip this test if the _testcapi module isn't available.
+ support.import_module('_testcapi')
from _testcapi import getargs_keywords, getargs_keyword_only
-
-"""
-> How about the following counterproposal. This also changes some of
-> the other format codes to be a little more regular.
->
-> Code C type Range check
->
-> b unsigned char 0..UCHAR_MAX
-> h signed short SHRT_MIN..SHRT_MAX
-> B unsigned char none **
-> H unsigned short none **
-> k * unsigned long none
-> I * unsigned int 0..UINT_MAX
-
-
-> i int INT_MIN..INT_MAX
-> l long LONG_MIN..LONG_MAX
-
-> K * unsigned long long none
-> L long long LLONG_MIN..LLONG_MAX
-
-> Notes:
->
-> * New format codes.
->
-> ** Changed from previous "range-and-a-half" to "none"; the
-> range-and-a-half checking wasn't particularly useful.
-
-Plus a C API or two, e.g. PyInt_AsLongMask() ->
-unsigned long and PyInt_AsLongLongMask() -> unsigned
-long long (if that exists).
-"""
+try:
+ from _testcapi import getargs_L, getargs_K
+except ImportError:
+ getargs_L = None # PY_LONG_LONG not available
+
+# > How about the following counterproposal. This also changes some of
+# > the other format codes to be a little more regular.
+# >
+# > Code C type Range check
+# >
+# > b unsigned char 0..UCHAR_MAX
+# > h signed short SHRT_MIN..SHRT_MAX
+# > B unsigned char none **
+# > H unsigned short none **
+# > k * unsigned long none
+# > I * unsigned int 0..UINT_MAX
+#
+#
+# > i int INT_MIN..INT_MAX
+# > l long LONG_MIN..LONG_MAX
+#
+# > K * unsigned long long none
+# > L long long LLONG_MIN..LLONG_MAX
+#
+# > Notes:
+# >
+# > * New format codes.
+# >
+# > ** Changed from previous "range-and-a-half" to "none"; the
+# > range-and-a-half checking wasn't particularly useful.
+#
+# Plus a C API or two, e.g. PyInt_AsLongMask() ->
+# unsigned long and PyInt_AsLongLongMask() -> unsigned
+# long long (if that exists).
LARGE = 0x7FFFFFFF
VERY_LARGE = 0xFF0000121212121212121242
- import _testcapi
-import re
-import sys
-import types
-import unittest
+import collections
+import datetime
+import functools
+import importlib
import inspect
+import io
import linecache
-import datetime
-import collections
import os
-import shutil
from os.path import normcase
+import _pickle
+import re
+import shutil
+import sys
+import types
+import unicodedata
+import unittest
-from test.support import run_unittest, TESTFN, DirsOnSysPath
+try:
+ from concurrent.futures import ThreadPoolExecutor
+except ImportError:
+ ThreadPoolExecutor = None
- from test.support import run_unittest, TESTFN, DirsOnSysPath
++from test.support import run_unittest, TESTFN, DirsOnSysPath, cpython_only
+from test.support import MISSING_C_DOCSTRINGS
+from test.script_helper import assert_python_ok, assert_python_failure
from test import inspect_fodder as mod
from test import inspect_fodder2 as mod2
kwonlyargs_e=['arg'],
formatted='(*, arg)')
+ def test_getfullargspec_signature_attr(self):
+ def test():
+ pass
+ spam_param = inspect.Parameter('spam', inspect.Parameter.POSITIONAL_ONLY)
+ test.__signature__ = inspect.Signature(parameters=(spam_param,))
+
+ self.assertFullArgSpecEquals(test, args_e=['spam'], formatted='(spam)')
+
+ def test_getfullargspec_signature_annos(self):
+ def test(a:'spam') -> 'ham': pass
+ spec = inspect.getfullargspec(test)
+ self.assertEqual(test.__annotations__, spec.annotations)
+
+ def test(): pass
+ spec = inspect.getfullargspec(test)
+ self.assertEqual(test.__annotations__, spec.annotations)
+
+ @unittest.skipIf(MISSING_C_DOCSTRINGS,
+ "Signature information for builtins requires docstrings")
+ def test_getfullargspec_builtin_methods(self):
+ self.assertFullArgSpecEquals(_pickle.Pickler.dump,
+ args_e=['self', 'obj'], formatted='(self, obj)')
+
+ self.assertFullArgSpecEquals(_pickle.Pickler(io.BytesIO()).dump,
+ args_e=['self', 'obj'], formatted='(self, obj)')
+
++ @cpython_only
+ @unittest.skipIf(MISSING_C_DOCSTRINGS,
+ "Signature information for builtins requires docstrings")
+    def test_getfullargspec_builtin_func(self):
++ import _testcapi
+ builtin = _testcapi.docstring_with_signature_with_defaults
+ spec = inspect.getfullargspec(builtin)
+ self.assertEqual(spec.defaults[0], 'avocado')
+
++ @cpython_only
+ @unittest.skipIf(MISSING_C_DOCSTRINGS,
+ "Signature information for builtins requires docstrings")
+    def test_getfullargspec_builtin_func_no_signature(self):
++ import _testcapi
+ builtin = _testcapi.docstring_no_signature
+ with self.assertRaises(TypeError):
+ inspect.getfullargspec(builtin)
def test_getargspec_method(self):
class A(object):
('kwargs', ..., int, "var_keyword")),
...))
- def test_signature_on_builtin_function(self):
- with self.assertRaisesRegex(ValueError, 'not supported by signature'):
- inspect.signature(type)
- with self.assertRaisesRegex(ValueError, 'not supported by signature'):
- # support for 'wrapper_descriptor'
- inspect.signature(type.__call__)
- with self.assertRaisesRegex(ValueError, 'not supported by signature'):
- # support for 'method-wrapper'
- inspect.signature(min.__call__)
- with self.assertRaisesRegex(ValueError,
- 'no signature found for builtin function'):
- # support for 'method-wrapper'
- inspect.signature(min)
++ @cpython_only
+ @unittest.skipIf(MISSING_C_DOCSTRINGS,
+ "Signature information for builtins requires docstrings")
+ def test_signature_on_builtins(self):
++ import _testcapi
+
+ def test_unbound_method(o):
+ """Use this to test unbound methods (things that should have a self)"""
+ signature = inspect.signature(o)
+ self.assertTrue(isinstance(signature, inspect.Signature))
+ self.assertEqual(list(signature.parameters.values())[0].name, 'self')
+ return signature
+
+ def test_callable(o):
+ """Use this to test bound methods or normal callables (things that don't expect self)"""
+ signature = inspect.signature(o)
+ self.assertTrue(isinstance(signature, inspect.Signature))
+ if signature.parameters:
+ self.assertNotEqual(list(signature.parameters.values())[0].name, 'self')
+ return signature
+
+ signature = test_callable(_testcapi.docstring_with_signature_with_defaults)
+ def p(name): return signature.parameters[name].default
+ self.assertEqual(p('s'), 'avocado')
+ self.assertEqual(p('b'), b'bytes')
+ self.assertEqual(p('d'), 3.14)
+ self.assertEqual(p('i'), 35)
+ self.assertEqual(p('n'), None)
+ self.assertEqual(p('t'), True)
+ self.assertEqual(p('f'), False)
+ self.assertEqual(p('local'), 3)
+ self.assertEqual(p('sys'), sys.maxsize)
+ self.assertEqual(p('exp'), sys.maxsize - 1)
+
+ test_callable(type)
+ test_callable(object)
+
+ # normal method
+ # (PyMethodDescr_Type, "method_descriptor")
+ test_unbound_method(_pickle.Pickler.dump)
+ d = _pickle.Pickler(io.StringIO())
+ test_callable(d.dump)
+
+ # static method
+ test_callable(str.maketrans)
+ test_callable('abc'.maketrans)
+
+ # class method
+ test_callable(dict.fromkeys)
+ test_callable({}.fromkeys)
+
+ # wrapper around slot (PyWrapperDescr_Type, "wrapper_descriptor")
+ test_unbound_method(type.__call__)
+ test_unbound_method(int.__add__)
+ test_callable((3).__add__)
+
+ # _PyMethodWrapper_Type
+ # support for 'method-wrapper'
+ test_callable(min.__call__)
+
+ class ThisWorksNow:
+ __call__ = type
+ test_callable(ThisWorksNow())
+
++ @cpython_only
+ @unittest.skipIf(MISSING_C_DOCSTRINGS,
+ "Signature information for builtins requires docstrings")
+ def test_signature_on_decorated_builtins(self):
++ import _testcapi
+ func = _testcapi.docstring_with_signature_with_defaults
+
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs) -> int:
+ return func(*args, **kwargs)
+ return wrapper
+
+ decorated_func = decorator(func)
+
+ self.assertEqual(inspect.signature(func),
+ inspect.signature(decorated_func))
+
++ @cpython_only
+ def test_signature_on_builtins_no_signature(self):
++ import _testcapi
+ with self.assertRaisesRegex(ValueError, 'no signature found for builtin'):
+ inspect.signature(_testcapi.docstring_no_signature)
def test_signature_on_non_function(self):
with self.assertRaisesRegex(TypeError, 'is not a callable object'):
# Test case for the os.poll() function
import os
+import subprocess
import random
import select
- from _testcapi import USHRT_MAX, INT_MAX, UINT_MAX
try:
import threading
except ImportError:
ret, out, err = assert_python_ok(*args)
self.assertIn(b"free PyDictObjects", err)
+ # The function has no parameter
+ self.assertRaises(TypeError, sys._debugmallocstats, True)
+
+ @unittest.skipUnless(hasattr(sys, "getallocatedblocks"),
+ "sys.getallocatedblocks unavailable on this build")
+ def test_getallocatedblocks(self):
+ # Some sanity checks
+ with_pymalloc = sysconfig.get_config_var('WITH_PYMALLOC')
+ a = sys.getallocatedblocks()
+ self.assertIs(type(a), int)
+ if with_pymalloc:
+ self.assertGreater(a, 0)
+ else:
+ # When WITH_PYMALLOC isn't available, we don't know anything
+ # about the underlying implementation: the function might
+ # return 0 or something greater.
+ self.assertGreaterEqual(a, 0)
+ try:
+            # The number of allocated blocks could conceivably exceed the
+            # total reference count (e.g. objects that own several memory
+            # blocks), but that is unlikely to happen in a normal test run.
+ self.assertLess(a, sys.gettotalrefcount())
+ except AttributeError:
+ # gettotalrefcount() not available
+ pass
+ gc.collect()
+ b = sys.getallocatedblocks()
+ self.assertLessEqual(b, a)
+ gc.collect()
+ c = sys.getallocatedblocks()
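+        # Allow some slack: unrelated allocations and deallocations may
+        # happen between the two measurements.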
+ self.assertIn(c, range(b - 50, b + 50))
+
+
+@test.support.cpython_only
class SizeofTest(unittest.TestCase):
def setUp(self):
t.join()
self.assertRaises(ValueError, bs.release)
-class ThreadJoinOnShutdown(BaseTestCase):
+ def test_locals_at_exit(self):
+ # Issue #19466: thread locals must not be deleted before destructors
+ # are called
+ rc, out, err = assert_python_ok("-c", """if 1:
+ import threading
+
+ class Atexit:
+ def __del__(self):
+ print("thread_dict.atexit = %r" % thread_dict.atexit)
+
+ thread_dict = threading.local()
+ thread_dict.atexit = "atexit"
- # Between fork() and exec(), only async-safe functions are allowed (issues
- # #12316 and #11870), and fork() from a worker thread is known to trigger
- # problems with some operating systems (issue #3863): skip problematic tests
- # on platforms known to behave badly.
- platforms_to_skip = ('freebsd4', 'freebsd5', 'freebsd6', 'netbsd5',
- 'os2emx', 'hp-ux11')
+ atexit = Atexit()
+ """)
+ self.assertEqual(out.rstrip(), b"thread_dict.atexit = 'atexit'")
+
+ def test_warnings_at_exit(self):
+ # Issue #19466: try to call most destructors at Python shutdown before
+ # destroying Python thread states
+ filename = __file__
+ rc, out, err = assert_python_ok("-Wd", "-c", """if 1:
+ import time
+ import threading
+
+ def open_sleep():
+ # a warning will be emitted when the open file will be
+ # destroyed (without being explicitly closed) while the daemon
+ # thread is destroyed
+ fileobj = open(%a, 'rb')
+ start_event.set()
+ time.sleep(60.0)
+
+ start_event = threading.Event()
+
+ thread = threading.Thread(target=open_sleep)
+ thread.daemon = True
+ thread.start()
+
+ # wait until the thread started
+ start_event.wait()
+ """ % filename)
+ self.assertRegex(err.rstrip(),
+ b"^sys:1: ResourceWarning: unclosed file ")
+
++ @cpython_only
+ def test_frame_tstate_tracing(self):
+ # Issue #14432: Crash when a generator is created in a C thread that is
+ # destroyed while the generator is still used. The issue was that a
+ # generator contains a frame, and the frame kept a reference to the
+ # Python state of the destroyed C thread. The crash occurs when a trace
+ # function is setup.
+
+ def noop_trace(frame, event, arg):
+ # no operation
+ return noop_trace
+
+ def generator():
+ while 1:
+ yield "genereator"
+
+ def callback():
+ if callback.gen is None:
+ callback.gen = generator()
+ return next(callback.gen)
+ callback.gen = None
+
+ old_trace = sys.gettrace()
+ sys.settrace(noop_trace)
+ try:
+ # Install a trace function
+ threading.settrace(noop_trace)
+
+ # Create a generator in a C thread which exits after the call
++ import _testcapi
+ _testcapi.call_in_temporary_c_thread(callback)
+
+ # Call the generator in a different Python thread, check that the
+ # generator didn't keep a reference to the destroyed thread state
+ for test in range(3):
+ # The trace function is still called here
+ callback()
+ finally:
+ sys.settrace(old_trace)
+
+
+class ThreadJoinOnShutdown(BaseTestCase):
def _run_and_join(self, script):
script = """if 1:
for t in threads:
t.join()
- @cpython_only
- @unittest.skipIf(_testcapi is None, "need _testcapi module")
- def test_frame_tstate_tracing(self):
- # Issue #14432: Crash when a generator is created in a C thread that is
- # destroyed while the generator is still used. The issue was that a
- # generator contains a frame, and the frame kept a reference to the
- # Python state of the destroyed C thread. The crash occurs when a trace
- # function is setup.
+ @unittest.skipUnless(hasattr(os, 'fork'), "needs os.fork()")
+ def test_clear_threads_states_after_fork(self):
+ # Issue #17094: check that threads states are cleared after fork()
- def noop_trace(frame, event, arg):
- # no operation
- return noop_trace
+ # start a bunch of threads
+ threads = []
+ for i in range(16):
+ t = threading.Thread(target=lambda : time.sleep(0.3))
+ threads.append(t)
+ t.start()
- def generator():
- while 1:
- yield "genereator"
+ pid = os.fork()
+ if pid == 0:
+ # check that threads states have been cleared
+ if len(sys._current_frames()) == 1:
+ os._exit(0)
+ else:
+ os._exit(1)
+ else:
+ _, status = os.waitpid(pid, 0)
+ self.assertEqual(0, status)
- def callback():
- if callback.gen is None:
- callback.gen = generator()
- return next(callback.gen)
- callback.gen = None
+ for t in threads:
+ t.join()
- old_trace = sys.gettrace()
- sys.settrace(noop_trace)
- try:
- # Install a trace function
- threading.settrace(noop_trace)
- # Create a generator in a C thread which exits after the call
- _testcapi.call_in_temporary_c_thread(callback)
+class SubinterpThreadingTests(BaseTestCase):
- # Call the generator in a different Python thread, check that the
- # generator didn't keep a reference to the destroyed thread state
- for test in range(3):
- # The trace function is still called here
- callback()
- finally:
- sys.settrace(old_trace)
+ def test_threads_join(self):
+ # Non-daemon threads should be joined at subinterpreter shutdown
+ # (issue #18808)
+ r, w = os.pipe()
+ self.addCleanup(os.close, r)
+ self.addCleanup(os.close, w)
+ code = r"""if 1:
+ import os
+ import threading
+ import time
+
+ def f():
+ # Sleep a bit so that the thread is still running when
+ # Py_EndInterpreter is called.
+ time.sleep(0.05)
+ os.write(%d, b"x")
+ threading.Thread(target=f).start()
+ """ % (w,)
+ ret = test.support.run_in_subinterp(code)
+ self.assertEqual(ret, 0)
+ # The thread was joined properly.
+ self.assertEqual(os.read(r, 1), b"x")
+
+ def test_threads_join_2(self):
+ # Same as above, but a delay gets introduced after the thread's
+ # Python code returned but before the thread state is deleted.
+ # To achieve this, we register a thread-local object which sleeps
+ # a bit when deallocated.
+ r, w = os.pipe()
+ self.addCleanup(os.close, r)
+ self.addCleanup(os.close, w)
+ code = r"""if 1:
+ import os
+ import threading
+ import time
+
+ class Sleeper:
+ def __del__(self):
+ time.sleep(0.05)
+
+ tls = threading.local()
+
+ def f():
+ # Sleep a bit so that the thread is still running when
+ # Py_EndInterpreter is called.
+ time.sleep(0.05)
+ tls.x = Sleeper()
+ os.write(%d, b"x")
+ threading.Thread(target=f).start()
+ """ % (w,)
+ ret = test.support.run_in_subinterp(code)
+ self.assertEqual(ret, 0)
+ # The thread was joined properly.
+ self.assertEqual(os.read(r, 1), b"x")
+
++ @cpython_only
+ def test_daemon_threads_fatal_error(self):
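+        # A daemon thread still running when Py_EndInterpreter() is called
+        # triggers a fatal error, so run the subinterpreter in a child
+        # process and inspect its stderr.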
+ subinterp_code = r"""if 1:
+ import os
+ import threading
+ import time
+
+ def f():
+ # Make sure the daemon thread is still running when
+ # Py_EndInterpreter is called.
+ time.sleep(10)
+ threading.Thread(target=f, daemon=True).start()
+ """
+ script = r"""if 1:
+ import _testcapi
+
+ _testcapi.run_in_subinterp(%r)
+ """ % (subinterp_code,)
+ with test.support.SuppressCrashReport():
+ rc, out, err = assert_python_failure("-c", script)
+ self.assertIn("Fatal Python error: Py_EndInterpreter: "
+ "not the last thread", err.decode())
class ThreadingExceptionTests(BaseTestCase):
class TracebackFormatTests(unittest.TestCase):
- def test_traceback_format(self):
+ def some_exception(self):
+ raise KeyError('blah')
+
+ @cpython_only
+ def check_traceback_format(self, cleanup_func=None):
+ from _testcapi import traceback_print
try:
- raise KeyError('blah')
+ self.some_exception()
except KeyError:
type_, value, tb = sys.exc_info()
+ if cleanup_func is not None:
+ # Clear the inner frames, not this one
+ cleanup_func(tb.tb_next)
traceback_fmt = 'Traceback (most recent call last):\n' + \
''.join(traceback.format_tb(tb))
file_ = StringIO()
self.assertEqual('%.1s' % "a\xe9\u20ac", 'a')
self.assertEqual('%.2s' % "a\xe9\u20ac", 'a\xe9')
- @support.cpython_only
+        # Issue #19995
+        class PseudoInt:
+            def __init__(self, value):
+                self.value = int(value)
+            def __int__(self):
+                return self.value
+            def __index__(self):
+                return self.value
+        class PseudoFloat:
+            def __init__(self, value):
+                self.value = float(value)
+            def __int__(self):
+                return int(self.value)
+        pi = PseudoFloat(3.1415)
+        letter_m = PseudoInt(109)
+ self.assertEqual('%x' % 42, '2a')
+ self.assertEqual('%X' % 15, 'F')
+ self.assertEqual('%o' % 9, '11')
+ self.assertEqual('%c' % 109, 'm')
+ self.assertEqual('%x' % letter_m, '6d')
+ self.assertEqual('%X' % letter_m, '6D')
+ self.assertEqual('%o' % letter_m, '155')
+ self.assertEqual('%c' % letter_m, 'm')
+        self.assertWarns(DeprecationWarning, '%x'.__mod__, pi)
+        self.assertWarns(DeprecationWarning, '%x'.__mod__, 3.14)
+        self.assertWarns(DeprecationWarning, '%X'.__mod__, 2.11)
+        self.assertWarns(DeprecationWarning, '%o'.__mod__, 1.79)
+        self.assertWarns(DeprecationWarning, '%c'.__mod__, pi)
+
+ def test_formatting_with_enum(self):
+ # issue18780
+ import enum
+ class Float(float, enum.Enum):
+ PI = 3.1415926
+ class Int(enum.IntEnum):
+ IDES = 15
+ class Str(str, enum.Enum):
+ ABC = 'abc'
+ # Testing Unicode formatting strings...
+ self.assertEqual("%s, %s" % (Str.ABC, Str.ABC),
+ 'Str.ABC, Str.ABC')
+ self.assertEqual("%s, %s, %d, %i, %u, %f, %5.2f" %
+ (Str.ABC, Str.ABC,
+ Int.IDES, Int.IDES, Int.IDES,
+ Float.PI, Float.PI),
+ 'Str.ABC, Str.ABC, 15, 15, 15, 3.141593, 3.14')
+
+ # formatting jobs delegated from the string implementation:
+ self.assertEqual('...%(foo)s...' % {'foo':Str.ABC},
+ '...Str.ABC...')
+ self.assertEqual('...%(foo)s...' % {'foo':Int.IDES},
+ '...Int.IDES...')
+ self.assertEqual('...%(foo)i...' % {'foo':Int.IDES},
+ '...15...')
+ self.assertEqual('...%(foo)d...' % {'foo':Int.IDES},
+ '...15...')
+ self.assertEqual('...%(foo)u...' % {'foo':Int.IDES, 'def':Float.PI},
+ '...15...')
+ self.assertEqual('...%(foo)f...' % {'foo':Float.PI,'def':123},
+ '...3.141593...')
+
def test_formatting_huge_precision(self):
+ format_string = "%.{}f".format(sys.maxsize + 1)
+ with self.assertRaises(ValueError):
+ result = format_string % 2.34
+
+ @support.cpython_only
+ def test_formatting_huge_precision_c_limits(self):
from _testcapi import INT_MAX
format_string = "%.{}f".format(INT_MAX + 1)
with self.assertRaises(ValueError):
# not supported: copy the raw format string. these tests are just here
    # to check for crashes and should not be considered as specifications
- self.assertEqual(PyUnicode_FromFormat(b'%1%s', b'abc'), '%s')
- self.assertEqual(PyUnicode_FromFormat(b'%1abc'), '%1abc')
- self.assertEqual(PyUnicode_FromFormat(b'%+i', c_int(10)), '%+i')
- self.assertEqual(PyUnicode_FromFormat(b'%.%s', b'abc'), '%.%s')
+ check_format('%s',
+ b'%1%s', b'abc')
+ check_format('%1abc',
+ b'%1abc')
+ check_format('%+i',
+ b'%+i', c_int(10))
+ check_format('%.%s',
+ b'%.%s', b'abc')
# Test PyUnicode_AsWideChar()
+ @support.cpython_only
def test_aswidechar(self):
from _testcapi import unicode_aswidechar
support.import_module('ctypes')
codecs.StreamReader returned incomplete data when were called after
readline() or read(size). Based on patch by Amaury Forgeot d'Arc.
+- Issue #20105: the codec exception chaining now correctly sets the
+ traceback of the original exception as its __traceback__ attribute.
+
+- Issue #17481: inspect.getfullargspec() now uses inspect.signature() API.
+
+- Issue #15304: concurrent.futures.wait() can block forever even if
+ Futures have completed. Patch by Glenn Langford.
+
+- Issue #14455: plistlib: fix serializing integers in the range of an
+  unsigned long long but outside the range of a signed long long for
+  binary plist files.
+
+IDLE
+----
+
+- Update the python.gif icon for the Idle classbrowser and pathbrowser
+  from the old green snake to the new blue and yellow snakes.
+
+- Issue #17721: Remove non-functional configuration dialog help button until we
+  make it actually give some help when clicked. Patch by Guilherme Simões.
+
+Tests
+-----
+
++- Issue #20532: Tests which use _testcapi are now marked as CPython only.
++
+- Issue #19920: Added tests for TarFile.list(). Based on patch by Vajrasky Kok.
+
+- Issue #19990: Added tests for the imghdr module. Based on patch by
+ Claudiu Popa.
+
+- Issue #20474: Fix test_socket "unexpected success" failures on OS X 10.7+.
+
+Tools/Demos
+-----------
+
+- Issue #20456: Argument Clinic now observes the C preprocessor conditional
+  compilation statements of the C files it parses. When a Clinic block is
+  inside conditional code, it adjusts its output to match, including
+  automatically generating an empty methoddef macro.
+
+- Issue #20456: Cloned functions in Argument Clinic now use the correct
+ name, not the name of the function they were cloned from, for text
+ strings inside generated code.
+
+- Issue #20456: Fixed Argument Clinic's test suite and "--converters" feature.
+
+- Issue #20456: Argument Clinic now allows specifying different names
+ for a parameter in Python and C, using "as" on the parameter line.
+
+- Issue #20326: Argument Clinic now uses a simple, unique signature to
+ annotate text signatures in docstrings, resulting in fewer false
+ positives. "self" parameters are also explicitly marked, allowing
+ inspect.Signature() to authoritatively detect (and skip) said parameters.
+
+- Issue #20326: Argument Clinic now generates separate checksums for the
+ input and output sections of the block, allowing external tools to verify
+ that the input has not changed (and thus the output is not out-of-date).
+
+Build
+-----
+
+- Issue #20465: Update SQLite shipped with OS X installer to 3.8.3.
+
+Documentation
+-------------
+
+- Issue #20488: Change wording to say importlib is *the* implementation of
+ import instead of just *an* implementation.
+
+- Issue #6386: Clarify in the tutorial that specifying a symlink to execute
+  means that the directory containing the executed script, and not the one
+  containing the symlink, is added to sys.path.
+
+
+What's New in Python 3.4.0 Beta 3?
+==================================
+
+Release date: 2014-01-26
+
+Core and Builtins
+-----------------
+
+- Issue #20189: Four additional builtin types (PyTypeObject,
+ PyMethodDescr_Type, _PyMethodWrapper_Type, and PyWrapperDescr_Type)
+ have been modified to provide introspection information for builtins.
+
+- Issue #17825: Cursor "^" is correctly positioned for SyntaxError and
+ IndentationError.
+
+- Issue #2382: SyntaxError cursor "^" is now written at the correct position
+  in most cases when multibyte characters appear in the line (before the "^").
+  This still does not work correctly with wide East Asian characters.
+
+- Issue #18960: The first line of a Python script could be executed twice when
+  the source encoding was specified on the second line. Now the source encoding
+  declaration on the second line has no effect if the first line contains
+  anything other than a comment. 'python -x' now works again with files that
+  have a source encoding declaration, and can be used to make Python batch
+  files on Windows.
+
+Library
+-------
+
+- asyncio: Various improvements and small changes not all covered by
+ issues listed below. E.g. wait_for() now cancels the inner task if
+  the timeout occurs; tweaked the set of exported symbols; renamed
+ Empty/Full to QueueEmpty/QueueFull; "with (yield from lock)" now
+ uses a separate context manager; readexactly() raises if not enough
+ data was read; PTY support tweaks.
+
+- Issue #20311: asyncio: Add a granularity attribute to BaseEventLoop: maximum
+ between the resolution of the BaseEventLoop.time() method and the resolution
+  of the selector. The granularity is used in the scheduler to round times and
+  deadlines.
+
+- Issue #20311: selectors: Add a resolution attribute to BaseSelector.
+
+- Issue #20189: unittest.mock no longer assumes that any object for
+ which it could get an inspect.Signature is a callable written in Python.
+ Fix courtesy of Michael Foord.
+
- Issue #20317: ExitStack.__exit__ could create a self-referential loop if an
exception raised by a cleanup operation already had its context set
correctly (for example, by the @contextmanager decorator). The infinite