tokenize must parse under 3.x even if it won't run there

To support a 3.x parent with a 2.x child.
David Wilson 2018-06-26 16:06:44 +01:00
parent 6b4e047017
commit 9e3e02fb65
1 changed file with 4 additions and 5 deletions
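
The constraint in the title: a 3.x parent needs to parse this module's source (presumably so it can ship it to a 2.x child) even though it never runs it there. A minimal sketch, run under a 3.x interpreter with an illustrative filename, of why 2.x-only syntax breaks even that:

# 2.x-only syntax is rejected by the 3.x parser at compile time,
# before a single statement runs.
py2_only = 'print "hello"\n'        # print statement: 2.x-only syntax
try:
    compile(py2_only, '<tokenize.py>', 'exec')
except SyntaxError as exc:
    print('rejected at parse time:', exc)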

@@ -33,7 +33,6 @@ from token import *
 import token
 __all__ = [x for x in dir(token) if not x.startswith("_")]
 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
-del x
 del token
 COMMENT = N_TOKENS
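
The deleted "del x" cleaned up a 2.x quirk: the list-comprehension variable leaks into module scope on 2.x, but 3.x gives comprehensions their own scope, so x is never bound and "del x" would raise NameError the moment the module body executes. A minimal sketch of the scoping difference, runnable on either line:

names = [x for x in ("a", "b", "_c") if not x.startswith("_")]
try:
    x                     # still bound on 2.x, NameError on 3.x
    leaked = True
except NameError:
    leaked = False
print("comprehension variable leaked: %r" % leaked)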
@@ -150,8 +149,8 @@ class StopTokenizing(Exception): pass
 def printtoken(type, token, srow_scol, erow_ecol, line): # for testing
     srow, scol = srow_scol
     erow, ecol = erow_ecol
-    print "%d,%d-%d,%d:\t%s\t%s" % \
-        (srow, scol, erow, ecol, tok_name[type], repr(token))
+    print("%d,%d-%d,%d:\t%s\t%s" % \
+        (srow, scol, erow, ecol, tok_name[type], repr(token)))
 
 def tokenize(readline, tokeneater=printtoken):
     """
@@ -316,7 +315,7 @@ def generate_tokens(readline):
         if contstr:                            # continued string
             if not line:
-                raise TokenError, ("EOF in multi-line string", strstart)
+                raise TokenError("EOF in multi-line string", strstart)
             endmatch = endprog.match(line)
             if endmatch:
                 pos = end = endmatch.end(0)
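
This hunk and the next make the same repair: "raise TokenError, args" is 2.x-only syntax that the 3.x parser rejects, while calling the exception class parses on both lines and builds an identical exception on 2.x. A minimal standalone sketch (strstart is replaced by a dummy position):

class TokenError(Exception):
    pass

try:
    # 2.x-only spelling, a SyntaxError under 3.x:
    #   raise TokenError, ("EOF in multi-line string", strstart)
    raise TokenError("EOF in multi-line string", (1, 0))
except TokenError as exc:
    print(exc.args)    # ('EOF in multi-line string', (1, 0))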
@@ -377,7 +376,7 @@ def generate_tokens(readline):
         else:                                  # continued statement
             if not line:
-                raise TokenError, ("EOF in multi-line statement", (lnum, 0))
+                raise TokenError("EOF in multi-line statement", (lnum, 0))
             continued = 0
 
         while pos < max: