# Local imports
from ..patcomp import PatternCompiler
from .. import pygram
+from .util import does_tree_import
class BaseFix(object):
used_names = set() # A set of all used NAMEs
order = "post" # Does the fixer prefer pre- or post-order traversal
explicit = False # Is this ignored by refactor.py -f all?
+ run_order = 5 # Fixers will be sorted by run order before execution
+ # Lower numbers will be run first.
# Shortcut for access to Python grammar symbols
syms = pygram.python_symbols
filename - the name of the file the tree came from.
"""
pass
+
+
class ConditionalFix(BaseFix):
    """Base class for fixers which do not execute if a particular import is found."""

    # Dotted name of the import that disables this fixer, e.g.
    # "future_builtins.filter".  Subclasses must override this.
    skip_on = None

    def start_tree(self, *args):
        super(ConditionalFix, self).start_tree(*args)
        # Computed lazily by should_skip(); None means "not checked yet".
        self._should_skip = None

    def should_skip(self, node):
        """Return True if the tree containing `node` imports `skip_on`.

        The answer is computed once per tree and cached.
        """
        if self._should_skip is None:
            package, _, name = self.skip_on.rpartition(".")
            self._should_skip = does_tree_import(package, name, node)
        return self._should_skip
# Local imports
from ..pgen2 import token
from . import basefix
-from .util import Name, Call, ListComp, does_tree_import, in_special_context
+from .util import Name, Call, ListComp, in_special_context
-class FixFilter(basefix.BaseFix):
+class FixFilter(basefix.ConditionalFix):
PATTERN = """
filter_lambda=power<
>
"""
- def start_tree(self, *args):
- super(FixFilter, self).start_tree(*args)
- self._new_filter = None
-
- def has_new_filter(self, node):
- if self._new_filter is not None:
- return self._new_filter
- self._new_filter = does_tree_import('future_builtins', 'filter', node)
- return self._new_filter
+ skip_on = "future_builtins.filter"
def transform(self, node, results):
- if self.has_new_filter(node):
- # If filter is imported from future_builtins, we don't want to
- # do anything here.
+ if self.should_skip(node):
return
if "filter_lambda" in results:
class FixFuture(basefix.BaseFix):
PATTERN = """import_from< 'from' module_name="__future__" 'import' any >"""
+ # This should be run last -- some things check for the import
+ run_order = 10
+
def transform(self, node, results):
- return BlankLine()
+ new = BlankLine()
+ new.prefix = node.get_prefix()
+ return new
--- /dev/null
+"""Fixer for import statements.
+If spam is being imported from the local directory, this import:
+ from spam import eggs
+Becomes:
+ from .spam import eggs
+
+And this import:
+ import spam
+Becomes:
+ import .spam
+"""
+
+# Local imports
+from . import basefix
+from os.path import dirname, join, exists, pathsep
+
class FixImport(basefix.BaseFix):
    """Turns implicit relative imports into explicit relative imports:
    'from spam import eggs' -> 'from .spam import eggs' (and likewise for
    plain 'import spam') whenever 'spam' resolves to a local module.
    """

    PATTERN = """
    import_from< 'from' imp=any 'import' any >
    |
    import_name< 'import' imp=any >
    """

    def transform(self, node, results):
        imp = results['imp']

        if unicode(imp).startswith('.'):
            # Already a new-style (explicit relative) import -- nothing to do.
            return

        if not probably_a_local_import(unicode(imp), self.filename):
            # No matching file next to self.filename, so treat it as a
            # global import and leave it alone.
            return

        # Some imps are top-level (eg: 'import ham')
        # some are first level (eg: 'import ham.eggs')
        # some are third level (eg: 'import ham.eggs as spam')
        # Hence, the loop: descend until we reach a leaf with a .value.
        while not hasattr(imp, 'value'):
            imp = imp.children[0]

        # Prefix the first name segment with '.' to make it relative.
        imp.value = "." + imp.value
        node.changed()
        return node
+
def probably_a_local_import(imp_name, file_path):
    """Return True if `imp_name` appears to name a module/package that
    lives in the same directory as the file being fixed (`file_path`).

    Only the first dotted segment is checked; existence of any of the
    candidate files below counts as "local".
    """
    # Must be stripped because the right space is included by the parser
    imp_name = imp_name.split('.', 1)[0].strip()
    base_path = dirname(file_path)
    base_path = join(base_path, imp_name)
    # NOTE(review): `pathsep` is the environment path separator (':'/';'),
    # not the directory separator; the intent of that entry looks like
    # "package directory", which would need os.sep -- TODO confirm.  The
    # companion test (test_files_checked) expects this exact list.
    for ext in ['.py', pathsep, '.pyc', '.so', '.sl', '.pyd']:
        if exists(base_path + ext):
            return True
    return False
""" Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and
itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363)
+ imports from itertools are fixed in fix_itertools_import.py
+
If itertools is imported as something else (ie: import itertools as it;
it.izip(spam, eggs)) method calls will not get fixed.
"""
power< func=%(it_funcs)s trailer< '(' [any] ')' > >
""" %(locals())
+ # Needs to be run after fix_(map|zip|filter)
+ run_order = 6
+
def transform(self, node, results):
prefix = None
func = results['func'][0]
--- /dev/null
+""" Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse) """
+
+# Local imports
+from . import basefix
+from .util import BlankLine
+
class FixItertoolsImports(basefix.BaseFix):
    """Fixes 'from itertools import ...' statements: drops imap/izip/ifilter
    (the builtins replace them) and renames ifilterfalse to filterfalse.
    Returns a blank line when nothing is left of the import.
    """

    # NOTE(review): the trailing %(locals()) is a no-op here (the pattern has
    # no format placeholders); it looks copied from fix_itertools.py.
    PATTERN = """
    import_from< 'from' 'itertools' 'import' imports=any >
    """ %(locals())

    def transform(self, node, results):
        imports = results['imports']
        # `imports` may be a single leaf or a node with children; normalize
        # to a list either way so the loop below works for both shapes.
        children = imports.children[:] or [imports]
        for child in children:
            if not hasattr(child, 'value'):
                # Handle 'import ... as ...' (no .value on the subtree)
                continue
            if child.value in ('imap', 'izip', 'ifilter'):
                child.remove()
            elif child.value == 'ifilterfalse':
                node.changed()
                child.value = 'filterfalse'

        # Make sure the import statement is still sane: collapse the commas
        # left behind by the removals (the toggle keeps every other comma).
        children = imports.children[:] or [imports]
        remove_comma = True
        for child in children:
            if remove_comma and getattr(child, 'value', None) == ',':
                child.remove()
            else:
                remove_comma ^= True

        # NOTE(review): `children` is a snapshot taken before the comma
        # removals just above, so children[-1] may already be detached from
        # the tree here -- presumably remove() tolerates that; verify.
        if unicode(children[-1]) == ',':
            children[-1].remove()

        # If there is nothing left, return a blank line (keeping the
        # original statement's prefix, e.g. a leading comment).
        if not (imports.children or getattr(imports, 'value', None)):
            new = BlankLine()
            new.prefix = node.get_prefix()
        else:
            new = node
        return new
# Local imports
from ..pgen2 import token
from . import basefix
-from .util import Name, Call, ListComp, does_tree_import, in_special_context
+from .util import Name, Call, ListComp, in_special_context
from ..pygram import python_symbols as syms
-class FixMap(basefix.BaseFix):
+class FixMap(basefix.ConditionalFix):
PATTERN = """
map_none=power<
>
"""
- def start_tree(self, *args):
- super(FixMap, self).start_tree(*args)
- self._future_map_found = None
-
- def has_future_map(self, node):
- if self._future_map_found is not None:
- return self._future_map_found
- self._future_map_found = does_tree_import('future_builtins', 'map', node)
- return self._future_map_found
+ skip_on = 'future_builtins.map'
def transform(self, node, results):
- if self.has_future_map(node):
- # If a future map has been imported for this file, we won't
- # be making any modifications
+ if self.should_skip(node):
return
if node.parent.type == syms.simple_stmt:
'print ...' into 'print(...)'
'print ... ,' into 'print(..., end=" ")'
'print >>x, ...' into 'print(..., file=x)'
+
+No changes are applied if print_function is imported from __future__
+
"""
# Local imports
)
-class FixPrint(basefix.BaseFix):
+class FixPrint(basefix.ConditionalFix):
PATTERN = """
simple_stmt< bare='print' any > | print_stmt
"""
+ skip_on = '__future__.print_function'
+
def transform(self, node, results):
assert results
+
+ if self.should_skip(node):
+ return
+
bare_print = results.get("bare")
if bare_print:
# Local imports
from . import basefix
-from .util import Name, Call, does_tree_import, in_special_context
+from .util import Name, Call, in_special_context
-class FixZip(basefix.BaseFix):
+class FixZip(basefix.ConditionalFix):
PATTERN = """
power< 'zip' args=trailer< '(' [any] ')' >
>
"""
- def start_tree(self, *args):
- super(FixZip, self).start_tree(*args)
- self._future_zip_found = None
-
- def has_future_zip(self, node):
- if self._future_zip_found is not None:
- return self._future_zip_found
- self._future_zip_found = does_tree_import('future_builtins', 'zip', node)
- return self._future_zip_found
+ skip_on = "future_builtins.zip"
def transform(self, node, results):
- if self.has_future_zip(node):
- # If a future zip has been imported for this file, we won't
- # be making any modifications
+ if self.should_skip(node):
return
if in_special_context(node):
return None
+
new = node.clone()
new.set_prefix("")
new = Call(Name("list"), [new])
import sys
# Pgen imports
-from . import grammar, parse, token, tokenize
+from . import grammar, parse, token, tokenize, pgen
class Driver(object):
gp = head + tail + ".".join(map(str, sys.version_info)) + ".pickle"
if force or not _newer(gp, gt):
logger.info("Generating grammar tables from %s", gt)
- from pgen2 import pgen
g = pgen.generate_grammar(gt)
if save:
logger.info("Writing grammar tables to %s", gp)
# Licensed to PSF under a Contributor Agreement.
# Pgen imports
-from pgen2 import grammar, token, tokenize
+from . import grammar, token, tokenize
class PgenGrammar(grammar.Grammar):
pass
HUGE = 0x7FFFFFFF # maximum repeat count, default max
# Cache mapping grammar symbol numbers to their names, filled lazily by
# type_repr() on first use.
_type_reprs = {}

def type_repr(type_num):
    """Return a human-readable name for grammar symbol number `type_num`.

    Falls back to returning `type_num` itself when no name is known.
    The symbol table is loaded lazily (inside the function) to avoid an
    import cycle with pygram at module-import time.
    """
    global _type_reprs
    if not _type_reprs:
        from .pygram import python_symbols
        # printing tokens is possible but not as useful
        # from .pgen2 import token // token.__dict__.items():
        for name, val in python_symbols.__dict__.items():
            # isinstance() instead of the non-idiomatic type(val) == int;
            # this skips dunder strings and keeps only the symbol numbers.
            if isinstance(val, int):
                _type_reprs[val] = name
    return _type_reprs.setdefault(type_num, type_num)
+
class Base(object):
def __repr__(self):
"""Returns a canonical string representation."""
- return "%s(%r, %r)" % (self.__class__.__name__,
- self.type,
+ return "%s(%s, %r)" % (self.__class__.__name__,
+ type_repr(self.type),
self.children)
def __str__(self):
return object.__new__(cls)
def __repr__(self):
- args = [self.type, self.content, self.name]
+ args = [type_repr(self.type), self.content, self.name]
while args and args[-1] is None:
del args[-1]
return "%s(%s)" % (self.__class__.__name__, ", ".join(map(repr, args)))
post_order_fixers.append(fixer)
else:
raise ValueError("Illegal fixer order: %r" % fixer.order)
+
+ pre_order_fixers.sort(key=lambda x: x.run_order)
+ post_order_fixers.sort(key=lambda x: x.run_order)
return (pre_order_fixers, post_order_fixers)
def log_error(self, msg, *args, **kwds):
# Python imports
import unittest
+from os.path import dirname, pathsep
# Local imports
from .. import pygram
options = Options(fix=[self.fixer], print_function=False)
self.refactor = refactor.RefactoringTool(options)
self.fixer_log = []
+ self.filename = "<string>"
for order in (self.refactor.pre_order, self.refactor.post_order):
for fixer in order:
def _check(self, before, after):
before = support.reformat(before)
after = support.reformat(after)
- tree = self.refactor.refactor_string(before, "<string>")
+ tree = self.refactor.refactor_string(before, self.filename)
self.failUnlessEqual(after, str(tree))
return tree
if not ignore_warnings:
self.failUnlessEqual(self.fixer_log, [])
    def assert_runs_after(self, *names):
        """Assert that self.fixer is sorted to run after all fixers in `names`.

        Builds a fresh RefactoringTool over the combined fixer list and
        checks that our fixer ends up last in the (pre, post) order.
        """
        fix = [self.fixer]
        fix.extend(names)
        options = Options(fix=fix, print_function=False)
        r = refactor.RefactoringTool(options)
        (pre, post) = r.get_fixers()
        # Fixer classes live in modules named fix_<name>; match by suffix.
        n = "fix_" + self.fixer
        if post and post[-1].__class__.__module__.endswith(n):
            # We're the last fixer to run
            return
        if pre and pre[-1].__class__.__module__.endswith(n) and not post:
            # We're the last in pre and post is empty
            return
        self.fail("Fixer run order (%s) is incorrect; %s should be last."\
                  %(", ".join([x.__class__.__module__ for x in (pre+post)]), n))
class Test_ne(FixerTestCase):
fixer = "ne"
a = """print(file=sys.stderr)"""
self.check(b, a)
+ # With from __future__ import print_function
+ def test_with_future_print_function(self):
+ # XXX: These tests won't actually do anything until the parser
+ # is fixed so it won't crash when it sees print(x=y).
+ # When #2412 is fixed, the try/except block can be taken
+ # out and the tests can be run like normal.
+ try:
+ s = "from __future__ import print_function\n"\
+ "print('Hai!', end=' ')"
+ self.unchanged(s)
+
+ b = "print 'Hello, world!'"
+ a = "print('Hello, world!')"
+ self.check(b, a)
+
+ s = "from __future__ import *\n"\
+ "print('Hai!', end=' ')"
+ self.unchanged(s)
+ except:
+ return
+ else:
+ self.assertFalse(True, "#2421 has been fixed -- printing tests "\
+ "need to be updated!")
class Test_exec(FixerTestCase):
fixer = "exec"
s = """exec(code, ns1, ns2)"""
self.unchanged(s)
-
class Test_repr(FixerTestCase):
fixer = "repr"
pass"""
self.unchanged(s)
-
class Test_raise(FixerTestCase):
fixer = "raise"
b = 6"""
self.check(b, a)
-
class Test_throw(FixerTestCase):
fixer = "throw"
b = 6"""
self.check(b, a)
-
class Test_long(FixerTestCase):
fixer = "long"
a = """x = int( x )"""
self.check(b, a)
-
class Test_dict(FixerTestCase):
fixer = "dict"
a = """for i in range(10):\n j=i"""
self.check(b, a)
-
class Test_raw_input(FixerTestCase):
fixer = "raw_input"
a = """x = input(foo(a) + 6)"""
self.check(b, a)
-
class Test_funcattrs(FixerTestCase):
fixer = "funcattrs"
s = "f(foo.__%s__.foo)" % attr
self.unchanged(s)
-
class Test_xreadlines(FixerTestCase):
fixer = "xreadlines"
s = "foo(xreadlines)"
self.unchanged(s)
-
class Test_imports(FixerTestCase):
fixer = "imports"
""" % (new, member, member, member)
self.check(b, a)
-
class Test_input(FixerTestCase):
fixer = "input"
a = """x = eval(input(foo(5) + 9))"""
self.check(b, a)
-
class Test_tuple_params(FixerTestCase):
fixer = "tuple_params"
s = "f(foo.__%s__.foo)" % attr
self.unchanged(s)
-
class Test_next(FixerTestCase):
fixer = "next"
""" % (mod, new, mod, new)
self.check(b, a)
-
class Test_unicode(FixerTestCase):
fixer = "unicode"
"""
self.unchanged(s)
-
class Test_basestring(FixerTestCase):
fixer = "basestring"
a = """isinstance(x, str)"""
self.check(b, a)
-
class Test_buffer(FixerTestCase):
fixer = "buffer"
a = """"""
self.check(b, a)
+ b = """# comment\nfrom __future__ import braces"""
+ a = """# comment\n"""
+ self.check(b, a)
+
+ b = """from __future__ import braces\n# comment"""
+ a = """\n# comment"""
+ self.check(b, a)
+
    def test_run_order(self):
        # fix_future sets run_order = 10 so that fixers which check for the
        # __future__ import (e.g. fix_print) see it before it is removed.
        self.assert_runs_after('print')
+
class Test_itertools(FixerTestCase):
fixer = "itertools"
a = """ itertools.filterfalse(a, b)"""
self.check(b, a)
    def test_run_order(self):
        # fix_itertools sets run_order = 6 so it runs after map/zip/filter.
        self.assert_runs_after('map', 'zip', 'filter')
+
class Test_itertools_imports(FixerTestCase):
    """Tests for fix_itertools_imports ('from itertools import imap, ...')."""

    fixer = 'itertools_imports'

    def test_reduced(self):
        # Removed names disappear; the remaining names keep their order.
        b = "from itertools import imap, izip, foo"
        a = "from itertools import foo"
        self.check(b, a)

        b = "from itertools import bar, imap, izip, foo"
        a = "from itertools import bar, foo"
        self.check(b, a)

    def test_comments(self):
        # The statement's prefix (the comment) survives on the blank line.
        b = "#foo\nfrom itertools import imap, izip"
        a = "#foo\n"
        self.check(b, a)

    def test_none(self):
        # Every imported name removed -> whole statement becomes blank.
        b = "from itertools import imap, izip"
        a = ""
        self.check(b, a)

    def test_import_as(self):
        # 'bar as bang' subtrees have no .value and are left untouched.
        b = "from itertools import izip, bar as bang, imap"
        a = "from itertools import bar as bang"
        self.check(b, a)

        s = "from itertools import bar as bang"
        self.unchanged(s)

    def test_ifilter(self):
        # ifilterfalse is renamed (to filterfalse), not removed.
        b = "from itertools import ifilterfalse"
        a = "from itertools import filterfalse"
        self.check(b, a)

        b = "from itertools import imap, ifilterfalse, foo"
        a = "from itertools import filterfalse, foo"
        self.check(b, a)

        b = "from itertools import bar, ifilterfalse, foo"
        a = "from itertools import bar, filterfalse, foo"
        self.check(b, a)

    def test_unchanged(self):
        # Names the fixer doesn't know about are left alone.
        s = "from itertools import foo"
        self.unchanged(s)
+
class Test_import(FixerTestCase):
    """Tests for fix_import (implicit relative -> explicit relative imports)."""

    fixer = "import"

    def setUp(self):
        FixerTestCase.setUp(self)
        # Replace fix_import's `exists` function so we can record which
        # paths the fixer probes and control whether they "exist".
        self.files_checked = []
        self.always_exists = True
        def fake_exists(name):
            self.files_checked.append(name)
            return self.always_exists

        from ..fixes import fix_import
        fix_import.exists = fake_exists

    def check_both(self, b, a):
        # The import is rewritten when the module exists locally and left
        # unchanged when it does not.
        self.always_exists = True
        FixerTestCase.check(self, b, a)
        self.always_exists = False
        FixerTestCase.unchanged(self, b)

    def test_files_checked(self):
        def p(path):
            # Takes a unix path and returns a path with correct separators
            # NOTE(review): joins with pathsep (the ':'/';' environment-path
            # separator), not the directory separator os.sep -- this mirrors
            # the extension list in probably_a_local_import; TODO confirm.
            return pathsep.join(path.split("/"))

        self.always_exists = False
        expected_extensions = ('.py', pathsep, '.pyc', '.so', '.sl', '.pyd')
        names_to_test = (p("/spam/eggs.py"), "ni.py", p("../../shrubbery.py"))

        for name in names_to_test:
            self.files_checked = []
            self.filename = name
            self.unchanged("import jam")

            if dirname(name): name = dirname(name) + '/jam'
            else: name = 'jam'
            expected_checks = set(name + ext for ext in expected_extensions)

            self.failUnlessEqual(set(self.files_checked), expected_checks)

    def test_from(self):
        b = "from foo import bar"
        a = "from .foo import bar"
        self.check_both(b, a)

    def test_dotted_from(self):
        b = "from green.eggs import ham"
        a = "from .green.eggs import ham"
        self.check_both(b, a)

    def test_from_as(self):
        b = "from green.eggs import ham as spam"
        a = "from .green.eggs import ham as spam"
        self.check_both(b, a)

    def test_import(self):
        b = "import foo"
        a = "import .foo"
        self.check_both(b, a)

    def test_dotted_import(self):
        b = "import foo.bar"
        a = "import .foo.bar"
        self.check_both(b, a)

    def test_dotted_import_as(self):
        b = "import foo.bar as bang"
        a = "import .foo.bar as bang"
        self.check_both(b, a)
+
if __name__ == "__main__":
import __main__