""")
self.check_tokenize("if False:\n"
" # NL\n"
+ " \n"
" True = False # NEWLINE\n", """\
NAME 'if' (1, 0) (1, 2)
NAME 'False' (1, 3) (1, 8)
NEWLINE '\\n' (1, 9) (1, 10)
COMMENT '# NL' (2, 4) (2, 8)
NL '\\n' (2, 8) (2, 9)
- INDENT ' ' (3, 0) (3, 4)
- NAME 'True' (3, 4) (3, 8)
- OP '=' (3, 9) (3, 10)
- NAME 'False' (3, 11) (3, 16)
- COMMENT '# NEWLINE' (3, 17) (3, 26)
- NEWLINE '\\n' (3, 26) (3, 27)
- DEDENT '' (4, 0) (4, 0)
+ NL '\\n' (3, 4) (3, 5)
+ INDENT ' ' (4, 0) (4, 4)
+ NAME 'True' (4, 4) (4, 8)
+ OP '=' (4, 9) (4, 10)
+ NAME 'False' (4, 11) (4, 16)
+ COMMENT '# NEWLINE' (4, 17) (4, 26)
+ NEWLINE '\\n' (4, 26) (4, 27)
+ DEDENT '' (5, 0) (5, 0)
""")
indent_error_file = b"""\
def k(x):
             if line[pos] in '#\r\n':           # skip comments or blank lines
                 if line[pos] == '#':
                     comment_token = line[pos:].rstrip('\r\n')
-                    nl_pos = pos + len(comment_token)
                     yield TokenInfo(COMMENT, comment_token,
                            (lnum, pos), (lnum, pos + len(comment_token)), line)
-                    yield TokenInfo(NL, line[nl_pos:],
-                           (lnum, nl_pos), (lnum, len(line)), line)
-                else:
-                    yield TokenInfo((NL, COMMENT)[line[pos] == '#'], line[pos:],
+                    pos += len(comment_token)
+
+                yield TokenInfo(NL, line[pos:],
                            (lnum, pos), (lnum, len(line)), line)
                 continue