Track changes in tokenize.py

Guido van Rossum 1998-04-03 19:56:40 +00:00
parent 18586f4c48
commit a50547e0c0

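This revision of the expected test_tokenize output distinguishes NL from NEWLINE: NL marks a newline that does not end a logical line (a blank line, or a line break inside unbalanced brackets), while NEWLINE is kept only at the end of a complete logical line. A minimal sketch of that distinction, using the modern Python 3 generator-style tokenize API rather than the callback-style tokenize(readline, tokeneater) interface the 1998 module exposed; the sample source string is illustrative and not taken from the test file:

    import io
    import tokenize

    # Illustrative source: a blank line and a line break inside an open
    # bracket tokenize as NL; the end of each complete logical line
    # tokenizes as NEWLINE.
    source = "x = 1\n\na = (3, 4,\n     5, 6)\n"

    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if tok.type in (tokenize.NEWLINE, tokenize.NL):
            print(tok.start, tok.end, tokenize.tok_name[tok.type], repr(tok.string))

Run against that string, the blank line and the break after '4,' report as NL, while 'x = 1' and the line with the closing ')' end in NEWLINE; the hunks below record the same reshuffling for the blank lines and bracketed continuations in the test source.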

@@ -1,7 +1,7 @@
test_tokenize
1,0-1,35: COMMENT "# Tests for the 'tokenize' module.\012"
2,0-2,43: COMMENT '# Large bits stolen from test_grammar.py. \012'
-3,0-3,1: NEWLINE '\012'
+3,0-3,1: NL '\012'
4,0-4,11: COMMENT '# Comments\012'
5,0-5,3: STRING '"#"'
5,3-5,4: NEWLINE '\012'
@@ -12,15 +12,15 @@ test_tokenize
10,4-10,10: COMMENT '# abc\012'
11,0-12,4: STRING "'''#\012#'''"
12,4-12,5: NEWLINE '\012'
-13,0-13,1: NEWLINE '\012'
+13,0-13,1: NL '\012'
14,0-14,1: NAME 'x'
14,2-14,3: OP '='
14,4-14,5: NUMBER '1'
14,7-14,8: COMMENT '#'
14,8-14,9: NEWLINE '\012'
-15,0-15,1: NEWLINE '\012'
+15,0-15,1: NL '\012'
16,0-16,25: COMMENT '# Balancing continuation\012'
-17,0-17,1: NEWLINE '\012'
+17,0-17,1: NL '\012'
18,0-18,1: NAME 'a'
18,2-18,3: OP '='
18,4-18,5: OP '('
@@ -28,7 +28,7 @@ test_tokenize
18,6-18,7: OP ','
18,8-18,9: NUMBER '4'
18,9-18,10: OP ','
-18,10-18,11: NEWLINE '\012'
+18,10-18,11: NL '\012'
19,2-19,3: NUMBER '5'
19,3-19,4: OP ','
19,5-19,6: NUMBER '6'
@@ -41,7 +41,7 @@ test_tokenize
20,6-20,7: OP ','
20,8-20,9: NUMBER '4'
20,9-20,10: OP ','
-20,10-20,11: NEWLINE '\012'
+20,10-20,11: NL '\012'
21,2-21,3: NUMBER '5'
21,3-21,4: OP ']'
21,4-21,5: NEWLINE '\012'
@@ -52,7 +52,7 @@ test_tokenize
22,8-22,9: OP ':'
22,9-22,10: NUMBER '5'
22,10-22,11: OP ','
-22,11-22,12: NEWLINE '\012'
+22,11-22,12: NL '\012'
23,2-23,5: STRING "'b'"
23,5-23,6: OP ':'
23,6-23,7: NUMBER '6'
@@ -74,24 +74,24 @@ test_tokenize
24,20-24,21: OP '-'
24,22-24,23: NAME 'a'
24,23-24,24: OP '['
-24,24-24,25: NEWLINE '\012'
+24,24-24,25: NL '\012'
25,3-25,4: NUMBER '3'
25,5-25,6: OP ']'
-25,6-25,7: NEWLINE '\012'
+25,6-25,7: NL '\012'
26,3-26,4: OP '-'
26,5-26,6: NAME 'x'
26,7-26,8: OP '+'
26,9-26,12: NAME 'len'
26,12-26,13: OP '('
26,13-26,14: OP '{'
-26,14-26,15: NEWLINE '\012'
+26,14-26,15: NL '\012'
27,3-27,4: OP '}'
-27,4-27,5: NEWLINE '\012'
+27,4-27,5: NL '\012'
28,4-28,5: OP ')'
-28,5-28,6: NEWLINE '\012'
+28,5-28,6: NL '\012'
29,2-29,3: OP ')'
29,3-29,4: NEWLINE '\012'
-30,0-30,1: NEWLINE '\012'
+30,0-30,1: NL '\012'
31,0-31,37: COMMENT '# Backslash means line continuation:\012'
32,0-32,1: NAME 'x'
32,2-32,3: OP '='
@@ -99,13 +99,13 @@ test_tokenize
33,0-33,1: OP '+'
33,2-33,3: NUMBER '1'
33,3-33,4: NEWLINE '\012'
-34,0-34,1: NEWLINE '\012'
+34,0-34,1: NL '\012'
35,0-35,55: COMMENT '# Backslash does not means continuation in comments :\\\012'
36,0-36,1: NAME 'x'
36,2-36,3: OP '='
36,4-36,5: NUMBER '0'
36,5-36,6: NEWLINE '\012'
-37,0-37,1: NEWLINE '\012'
+37,0-37,1: NL '\012'
38,0-38,20: COMMENT '# Ordinary integers\012'
39,0-39,4: NUMBER '0xff'
39,5-39,7: OP '<>'
@@ -136,7 +136,7 @@ test_tokenize
44,14-44,15: OP '-'
44,15-44,16: NUMBER '1'
44,16-44,17: NEWLINE '\012'
-45,0-45,1: NEWLINE '\012'
+45,0-45,1: NL '\012'
46,0-46,16: COMMENT '# Long integers\012'
47,0-47,1: NAME 'x'
47,2-47,3: OP '='
@@ -170,7 +170,7 @@ test_tokenize
54,2-54,3: OP '='
54,4-54,35: NUMBER '123456789012345678901234567890l'
54,35-54,36: NEWLINE '\012'
-55,0-55,1: NEWLINE '\012'
+55,0-55,1: NL '\012'
56,0-56,25: COMMENT '# Floating-point numbers\012'
57,0-57,1: NAME 'x'
57,2-57,3: OP '='
@@ -217,7 +217,7 @@ test_tokenize
68,2-68,3: OP '='
68,4-68,9: NUMBER '3.1e4'
68,9-68,10: NEWLINE '\012'
-69,0-69,1: NEWLINE '\012'
+69,0-69,1: NL '\012'
70,0-70,18: COMMENT '# String literals\012'
71,0-71,1: NAME 'x'
71,2-71,3: OP '='
@@ -309,7 +309,7 @@ test_tokenize
111,4-111,5: OP '+'
111,6-112,3: STRING "R'''spam\012'''"
112,3-112,4: NEWLINE '\012'
-113,0-113,1: NEWLINE '\012'
+113,0-113,1: NL '\012'
114,0-114,14: COMMENT '# Indentation\012'
115,0-115,2: NAME 'if'
115,3-115,4: NUMBER '1'
@@ -381,9 +381,9 @@ test_tokenize
128,12-128,13: OP '='
128,14-128,15: NUMBER '2'
128,15-128,16: NEWLINE '\012'
-129,0-129,1: NEWLINE '\012'
+129,0-129,1: NL '\012'
130,0-130,12: COMMENT '# Operators\012'
-131,0-131,1: NEWLINE '\012'
+131,0-131,1: NL '\012'
132,0-132,0: DEDENT ''
132,0-132,0: DEDENT ''
132,0-132,0: DEDENT ''
@@ -422,7 +422,7 @@ test_tokenize
133,28-133,29: OP ':'
133,30-133,34: NAME 'pass'
133,34-133,35: NEWLINE '\012'
-134,0-134,1: NEWLINE '\012'
+134,0-134,1: NL '\012'
135,0-135,1: OP '('
135,1-135,2: NAME 'x'
135,2-135,3: OP ','
@@ -443,7 +443,7 @@ test_tokenize
135,26-135,27: OP '}'
135,27-135,28: OP ')'
135,28-135,29: NEWLINE '\012'
-136,0-136,1: NEWLINE '\012'
+136,0-136,1: NL '\012'
137,0-137,13: COMMENT '# comparison\012'
138,0-138,2: NAME 'if'
138,3-138,4: NUMBER '1'
@@ -474,7 +474,7 @@ test_tokenize
138,65-138,66: OP ':'
138,67-138,71: NAME 'pass'
138,71-138,72: NEWLINE '\012'
-139,0-139,1: NEWLINE '\012'
+139,0-139,1: NL '\012'
140,0-140,9: COMMENT '# binary\012'
141,0-141,1: NAME 'x'
141,2-141,3: OP '='
@@ -494,7 +494,7 @@ test_tokenize
143,6-143,7: OP '|'
143,8-143,9: NUMBER '1'
143,9-143,10: NEWLINE '\012'
-144,0-144,1: NEWLINE '\012'
+144,0-144,1: NL '\012'
145,0-145,8: COMMENT '# shift\012'
146,0-146,1: NAME 'x'
146,2-146,3: OP '='
@@ -504,7 +504,7 @@ test_tokenize
146,11-146,13: OP '>>'
146,14-146,15: NUMBER '1'
146,15-146,16: NEWLINE '\012'
-147,0-147,1: NEWLINE '\012'
+147,0-147,1: NL '\012'
148,0-148,11: COMMENT '# additive\012'
149,0-149,1: NAME 'x'
149,2-149,3: OP '='
@@ -518,7 +518,7 @@ test_tokenize
149,18-149,19: OP '+'
149,20-149,21: NUMBER '1'
149,21-149,22: NEWLINE '\012'
-150,0-150,1: NEWLINE '\012'
+150,0-150,1: NL '\012'
151,0-151,17: COMMENT '# multiplicative\012'
152,0-152,1: NAME 'x'
152,2-152,3: OP '='
@@ -530,7 +530,7 @@ test_tokenize
152,14-152,15: OP '%'
152,16-152,17: NUMBER '1'
152,17-152,18: NEWLINE '\012'
-153,0-153,1: NEWLINE '\012'
+153,0-153,1: NL '\012'
154,0-154,8: COMMENT '# unary\012'
155,0-155,1: NAME 'x'
155,2-155,3: OP '='
@@ -568,7 +568,7 @@ test_tokenize
156,23-156,24: OP '*'
156,24-156,25: NUMBER '1'
156,25-156,26: NEWLINE '\012'
-157,0-157,1: NEWLINE '\012'
+157,0-157,1: NL '\012'
158,0-158,11: COMMENT '# selector\012'
159,0-159,6: NAME 'import'
159,7-159,10: NAME 'sys'
@@ -588,5 +588,5 @@ test_tokenize
160,28-160,29: OP '('
160,29-160,30: OP ')'
160,30-160,31: NEWLINE '\012'
-161,0-161,1: NEWLINE '\012'
+161,0-161,1: NL '\012'
162,0-162,0: ENDMARKER ''