granicus.if.org Git - python/commitdiff
Make test_tokenize really pass -- don't add extra output.
author    Guido van Rossum <guido@python.org>
          Sat, 4 Aug 2007 17:55:43 +0000 (17:55 +0000)
committer Guido van Rossum <guido@python.org>
          Sat, 4 Aug 2007 17:55:43 +0000 (17:55 +0000)
Lib/test/test_tokenize.py

index 788a04b989faeb2cb888980880b3ac56e1b733e1..9ef65639114558acf08ca0671437e00c94d6c8d1 100644
@@ -98,7 +98,7 @@ _PRINT_WORKING_MSG_INTERVAL = 5 * 60
 # and tokenized again from the latter.  The test fails if the second
 # tokenization doesn't match the first.
 def test_roundtrip(f):
-    ## print 'Testing:', f
+    ## print('Testing:', f)
     # Get the encoding first
     fobj = open(f, encoding="latin-1")
     first2lines = fobj.readline() + fobj.readline()
@@ -106,7 +106,7 @@ def test_roundtrip(f):
     m = re.search(r"coding:\s*(\S+)", first2lines)
     if m:
         encoding = m.group(1)
-        print("    coding:", encoding)
+        ## print("    coding:", encoding)
     else:
         encoding = "utf-8"
     fobj = open(f, encoding=encoding)
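
The hunk context above summarizes what test_roundtrip() checks: the file is tokenized, the source is rebuilt from those tokens, the rebuilt text is tokenized again, and the test fails if the second token stream does not match the first. Below is a minimal sketch of that roundtrip idea, not the actual test code; roundtrip_ok is a hypothetical helper, and it compares only (token type, token string) pairs as a simplification.

```python
import io
import tokenize

def roundtrip_ok(source: str) -> bool:
    # Tokenize the source, rebuild text with untokenize(), tokenize the
    # rebuilt text, and check that both streams agree on (type, string).
    first = list(tokenize.generate_tokens(io.StringIO(source).readline))
    rebuilt = tokenize.untokenize(first)
    second = list(tokenize.generate_tokens(io.StringIO(rebuilt).readline))
    return [tok[:2] for tok in first] == [tok[:2] for tok in second]

if __name__ == "__main__":
    print(roundtrip_ok("def f(x):\n    return x + 1  # comment\n"))
```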