bpo-30341: Improve _PyTrash_thread_destroy_chain() a little bit (#1545)
author     Xiang Zhang <angwerzx@126.com>
           Sat, 13 May 2017 05:36:14 +0000 (13:36 +0800)
committer  GitHub <noreply@github.com>
           Sat, 13 May 2017 05:36:14 +0000 (13:36 +0800)
* add a comment about why we need to increase trash_delete_nesting
* move the increase and decrease outside of the loop

Include/object.h
Objects/object.c

diff --git a/Include/object.h b/Include/object.h
index 63e37b8d33a68c98e0ee096c152384a45268caa7..f5ed70b11296829ba2f2e7722eec9749e94a09a2 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -1029,7 +1029,7 @@ without deallocating anything (and so unbounded call-stack depth is avoided).
 When the call stack finishes unwinding again, code generated by the END macro
 notices this, and calls another routine to deallocate all the objects that
 may have been added to the list of deferred deallocations.  In effect, a
-chain of N deallocations is broken into N / PyTrash_UNWIND_LEVEL pieces,
+chain of N deallocations is broken into (N-1)/(PyTrash_UNWIND_LEVEL-1) pieces,
 with the call stack never exceeding a depth of PyTrash_UNWIND_LEVEL.
 */
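
A back-of-the-envelope check of the revised formula (a sketch, assuming PyTrash_UNWIND_LEVEL is 50, its value in Include/object.h): the outermost deallocation can unwind up to 50 levels before deferring, while every later pass over the deferred list starts with trash_delete_nesting already held at 1 and so unwinds only 49 more, giving roughly 1 + (N - 50)/49 = (N - 1)/49 pieces. In Python:

    N = 500000                                      # length of the deallocation chain
    PyTrash_UNWIND_LEVEL = 50                       # assumed value from Include/object.h
    pieces = (N - 1) // (PyTrash_UNWIND_LEVEL - 1)
    print(pieces)                                   # -> 10204 pieces, stack depth still capped at 50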
 
diff --git a/Objects/object.c b/Objects/object.c
index 2d79e2f9c131d98d47aceea17b4a45e8a0ed8ed8..2ba6e572ea61ff983ef1b20efa88f18c0950c663 100644
--- a/Objects/object.c
+++ b/Objects/object.c
@@ -2093,6 +2093,19 @@ void
 _PyTrash_thread_destroy_chain(void)
 {
     PyThreadState *tstate = PyThreadState_GET();
+    /* We need to increase trash_delete_nesting here; otherwise,
+       _PyTrash_thread_destroy_chain will be called recursively
+       and then possibly crash.  An example that may crash without
+       the increase:
+           N = 500000  # needs to be large enough
+           ob = object()
+           tups = [(ob,) for i in range(N)]
+           for i in range(49):
+               tups = [(tup,) for tup in tups]
+           del tups
+    */
+    assert(tstate->trash_delete_nesting == 0);
+    ++tstate->trash_delete_nesting;
     while (tstate->trash_delete_later) {
         PyObject *op = tstate->trash_delete_later;
         destructor dealloc = Py_TYPE(op)->tp_dealloc;
@@ -2107,10 +2120,10 @@ _PyTrash_thread_destroy_chain(void)
          * up distorting allocation statistics.
          */
         assert(op->ob_refcnt == 0);
-        ++tstate->trash_delete_nesting;
         (*dealloc)(op);
-        --tstate->trash_delete_nesting;
+        assert(tstate->trash_delete_nesting == 1);
     }
+    --tstate->trash_delete_nesting;
 }
 
 #ifndef Py_TRACE_REFS
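
The example in the new comment shows why the counter has to stay at 1 while the deferred list is drained: a deferred tuple's deallocator ends in the trashcan END macro, which calls _PyTrash_thread_destroy_chain() again whenever trash_delete_later is non-empty and trash_delete_nesting has dropped back to zero. Without the increment, that re-entry would happen once per deferred object, recursing roughly N levels deep and overflowing the C stack. For reference, the comment's example as a standalone script (lightly annotated; the numbers are the comment's own, and N may need tuning to a platform's stack limits):

    N = 500000                          # "needs to be large enough", per the comment
    ob = object()
    tups = [(ob,) for i in range(N)]    # N one-tuples, all sharing a single object
    for i in range(49):
        tups = [(tup,) for tup in tups] # wrap 49 times: each element is now a 50-deep tuple chain
    del tups                            # dropping the list defers the deepest tuples to
                                        # trash_delete_later, which destroy_chain then drains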