Fix other re-entrancy nits for the lru_cache.
authorRaymond Hettinger <python@rcn.com>
Mon, 4 Mar 2013 08:34:09 +0000 (03:34 -0500)
committerRaymond Hettinger <python@rcn.com>
Mon, 4 Mar 2013 08:34:09 +0000 (03:34 -0500)
Keep references for oldkey and oldvalue so they can't
trigger a __del__ method to reenter our thread.

Move the cache[key]=link step to the end, after the link
data is in a consistent state.

Under exotic circumstances, the cache[key]=link step could
trigger reentrancy (i.e. the key would have to have a hash
exactly equal to that for another key in the cache and the
key would need a __eq__ method that makes a reentrant call
to our cached function).

Lib/functools.py

index 36466f9c1797e4537aac9a49eab51fb2ce991493..87c1b6992e2ffc363f9d1eed304ce63562f87eea 100644 (file)
@@ -267,19 +267,23 @@ def lru_cache(maxsize=128, typed=False):
                         # computed result and update the count of misses.
                         pass
                     elif full:
-                        # use root to store the new key and result
-                        root[KEY] = key
-                        root[RESULT] = result
-                        cache[key] = root
+                        # use the old root to store the new key and result
+                        oldroot = root
+                        oldroot[KEY] = key
+                        oldroot[RESULT] = result
                         # empty the oldest link and make it the new root
-                        root = root[NEXT]
-                        del cache[root[KEY]]
+                        root = oldroot[NEXT]
+                        oldkey = root[KEY]
+                        oldvalue = root[RESULT]
                         root[KEY] = root[RESULT] = None
+                        # now update the cache dictionary for the new links
+                        del cache[oldkey]
+                        cache[key] = oldroot
                     else:
                         # put result in a new link at the front of the queue
                         last = root[PREV]
                         link = [last, root, key, result]
-                        cache[key] = last[NEXT] = root[PREV] = link
+                        last[NEXT] = root[PREV] = cache[key] = link
                         currsize += 1
                         full = (currsize == maxsize)
                     misses += 1