From 845b14cc8ef2d95e72c97a788a1ffb31faeaa3a8 Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka
Date: Sun, 11 Jan 2015 12:48:17 +0200
Subject: [PATCH] Removed duplicated dict entries.

---
 Lib/tokenize.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index 51da4847f2..0659c55a06 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -186,7 +186,6 @@ endpats = {"'": Single, '"': Double,
            "rB'''": Single3, 'rB"""': Double3,
            "RB'''": Single3, 'RB"""': Double3,
            "u'''": Single3, 'u"""': Double3,
-           "R'''": Single3, 'R"""': Double3,
            "U'''": Single3, 'U"""': Double3,
            'r': None, 'R': None, 'b': None, 'B': None,
            'u': None, 'U': None}
--
2.50.1
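
Why this deletion is behavior-preserving: in a Python dict literal, a repeated key silently replaces the earlier binding, so the duplicated "R'''"/'R"""' entries in endpats mapped to the same values they already had and were simply redundant. A minimal sketch of that dict-literal behavior follows; endpats_demo and the string values are illustrative stand-ins, not the actual regex patterns from tokenize.py.

# Illustrative sketch only: duplicate keys in a dict literal keep the last value.
endpats_demo = {
    "R'''": "Single3",   # first occurrence
    'R"""': "Double3",
    "R'''": "Single3",   # duplicate key: silently overrides the first, same value
    'R"""': "Double3",   # duplicate key: likewise redundant
}

# Only the two distinct keys survive, with the same values the patch keeps.
assert len(endpats_demo) == 2
assert endpats_demo["R'''"] == "Single3"
assert endpats_demo['R"""'] == "Double3"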