From 11cb8135988ebe20631db08c60155d2a0b17c1b2 Mon Sep 17 00:00:00 2001
From: Tim Peters
Date: Mon, 12 May 2003 19:29:36 +0000
Subject: [PATCH] Close the file after tokenizing it. Because the open file
 object was bound to a module global, the file object remained open
 throughout the test suite run.

---
 Lib/test/test_tokenize.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index e3fbb15f86..22a1d90afb 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -3,7 +3,10 @@ import tokenize, os, sys
 
 if verbose:
     print 'starting...'
-file = open(findfile('tokenize_tests'+os.extsep+'py'))
-tokenize.tokenize(file.readline)
+
+f = file(findfile('tokenize_tests'+os.extsep+'py'))
+tokenize.tokenize(f.readline)
+f.close()
+
 if verbose:
     print 'finished'
-- 
2.50.1
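
For comparison, a minimal sketch of the same cleanup as it might be written on
modern Python 3; it is not part of the patch above. A `with` block closes the
file even if tokenization raises, so no explicit f.close() is needed. The
sketch assumes Python 3's tokenize.tokenize(), which takes a bytes readline
callable from a file opened in binary mode; the function name and the choice
of target file are illustrative only.

import tokenize

def tokenize_file(path):
    # The file object is local to this function, and the `with` block
    # guarantees it is closed even if tokenize.tokenize() raises.
    with open(path, 'rb') as f:
        # Python 3's tokenize.tokenize() expects a bytes readline callable.
        return list(tokenize.tokenize(f.readline))

if __name__ == '__main__':
    # Any Python source file works; here the module tokenizes itself.
    for tok in tokenize_file(__file__):
        print(tok)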