#16152: fix tokenize to ignore whitespace at the end of the code when no newline is found. Patch by Ned Batchelder.
This commit is contained in:
parent d7bae5e85a
commit 7d24b1698a
Lib/test/test_tokenize.py

@@ -550,6 +550,10 @@ Evil tabs
     NAME       'pass'        (3, 9) (3, 13)
     DEDENT     ''            (4, 0) (4, 0)
     DEDENT     ''            (4, 0) (4, 0)
 
+Pathological whitespace (http://bugs.python.org/issue16152)
+
+    >>> dump_tokens("@ ")
+    OP         '@'           (1, 0) (1, 1)
     """
 
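The doctest above goes through dump_tokens, a helper defined inside test_tokenize.py itself. A minimal sketch of the same scenario through the public API, assuming the Python 2 tokenize module this patch targets, is shown below; before the change, nothing in PseudoToken could match after the trailing whitespace, so the tokenizer fell through to its error handling, while with the fix the spaces are consumed quietly.

# Sketch only: the doctest's scenario via the public generate_tokens API.
from StringIO import StringIO   # Python 2 module, matching the patched branch
import tokenize

source = "@ "   # one real token, then trailing whitespace, no final newline

for tok_type, tok_str, start, end, logical_line in tokenize.generate_tokens(
        StringIO(source).readline):
    print("%-10s %-13r %s %s" % (tokenize.tok_name[tok_type], tok_str, start, end))

# With the fix the output is just OP '@' (1, 0) (1, 1) plus the closing
# ENDMARKER; no ERRORTOKEN is emitted for the trailing space.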
Lib/tokenize.py

@@ -95,7 +95,7 @@ ContStr = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
                 group("'", r'\\\r?\n'),
                 r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
                 group('"', r'\\\r?\n'))
-PseudoExtras = group(r'\\\r?\n', Comment, Triple)
+PseudoExtras = group(r'\\\r?\n|\Z', Comment, Triple)
 PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
 
 tokenprog, pseudoprog, single3prog, double3prog = map(
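The only functional change in this hunk is the `|\Z` alternative: PseudoExtras can now match the empty string at end of input, so a line that ends in bare whitespace with no newline still yields a pseudomatch instead of failing outright. A trimmed-down sketch of that difference (the group helper and names mirror tokenize.py, but only the alternatives relevant to this bug are kept):

import re

def group(*choices):
    # Same shape as the helper in tokenize.py: a parenthesised alternation.
    return '(' + '|'.join(choices) + ')'

Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'

# Before: the group following the whitespace has to match some real text.
old_pseudo = re.compile(Whitespace + group(r'\\\r?\n', Comment))
# After: \Z lets the group match zero-width at the very end of the input.
new_pseudo = re.compile(Whitespace + group(r'\\\r?\n|\Z', Comment))

line = "@   "                               # trailing spaces, no newline
print(old_pseudo.match(line, 1))            # None: no token after the spaces
print(new_pseudo.match(line, 1).span(1))    # (4, 4): empty match at end of line

That empty span is exactly what the next hunk has to cope with.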
Lib/tokenize.py

@@ -362,6 +362,8 @@ def generate_tokens(readline):
             if pseudomatch:                                # scan for tokens
                 start, end = pseudomatch.span(1)
                 spos, epos, pos = (lnum, start), (lnum, end), end
+                if start == end:
+                    continue
                 token, initial = line[start:end], line[start]
 
                 if initial in numchars or \
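The guard is needed because, with `\Z` in PseudoExtras, pseudomatch.span(1) can now be an empty span sitting at the end of the line; without it, the very next statement would evaluate line[start] one position past the end. Skipping the zero-width match is enough, since pos has already been advanced to end and the `while pos < max` loop then terminates. A standalone sketch of that boundary case (names follow the hunk; this is not the real scanning loop):

line = "@   "              # what readline() handed back: no trailing newline
start = end = len(line)    # the empty span(1) produced once \Z can match

# Without the new guard, the next line of the loop would fail:
try:
    token, initial = line[start:end], line[start]
except IndexError:
    print("line[start] is out of range for a zero-width match at end of line")

# With the guard, the empty match is simply skipped:
if start == end:
    print("nothing but trailing whitespace left on this line; move on")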
Misc/ACKS

@@ -64,6 +64,7 @@ Des Barry
 Ulf Bartelt
 Don Bashford
 Nick Bastin
+Ned Batchelder
 Jeff Bauer
 Mike Bayer
 Michael R Bax