bash - tokeniser_test.go
1 package bash
2
3 import (
4 "testing"
5
6 "vimagination.zapto.org/parser"
7 )
8
9 func TestTokeniser(t *testing.T) {
10 for n, test := range [...]struct {
11 Input string
12 Output []parser.Token
13 }{
14 { // 1
15 "",
16 []parser.Token{
17 {Type: parser.TokenDone, Data: ""},
18 },
19 },
20 { // 2
21 " ",
22 []parser.Token{
23 {Type: TokenWhitespace, Data: " "},
24 {Type: parser.TokenDone, Data: ""},
25 },
26 },
27 { // 3
28 " \t\\\n",
29 []parser.Token{
30 {Type: TokenWhitespace, Data: " \t\\\n"},
31 {Type: parser.TokenDone, Data: ""},
32 },
33 },
34 { // 4
35 "\\\n \t",
36 []parser.Token{
37 {Type: TokenWhitespace, Data: "\\\n \t"},
38 {Type: parser.TokenDone, Data: ""},
39 },
40 },
41 { // 5
42 " \n\n \n",
43 []parser.Token{
44 {Type: TokenWhitespace, Data: " "},
45 {Type: TokenLineTerminator, Data: "\n\n"},
46 {Type: TokenWhitespace, Data: " "},
47 {Type: TokenLineTerminator, Data: "\n"},
48 {Type: parser.TokenDone, Data: ""},
49 },
50 },
51 { // 6
52 "#A comment\n# B comment",
53 []parser.Token{
54 {Type: TokenComment, Data: "#A comment"},
55 {Type: TokenLineTerminator, Data: "\n"},
56 {Type: TokenComment, Data: "# B comment"},
57 {Type: parser.TokenDone, Data: ""},
58 },
59 },
60 { // 7
61 "$ident $name a\\nbc=a $0 $12 a$b a${b}c $$ $! $?",
62 []parser.Token{
63 {Type: TokenIdentifier, Data: "$ident"},
64 {Type: TokenWhitespace, Data: " "},
65 {Type: TokenIdentifier, Data: "$name"},
66 {Type: TokenWhitespace, Data: " "},
67 {Type: TokenWord, Data: "a\\nbc=a"},
68 {Type: TokenWhitespace, Data: " "},
69 {Type: TokenIdentifier, Data: "$0"},
70 {Type: TokenWhitespace, Data: " "},
71 {Type: TokenIdentifier, Data: "$1"},
72 {Type: TokenWord, Data: "2"},
73 {Type: TokenWhitespace, Data: " "},
74 {Type: TokenWord, Data: "a"},
75 {Type: TokenIdentifier, Data: "$b"},
76 {Type: TokenWhitespace, Data: " "},
77 {Type: TokenWord, Data: "a"},
78 {Type: TokenPunctuator, Data: "${"},
79 {Type: TokenIdentifier, Data: "b"},
80 {Type: TokenPunctuator, Data: "}"},
81 {Type: TokenWord, Data: "c"},
82 {Type: TokenWhitespace, Data: " "},
83 {Type: TokenIdentifier, Data: "$$"},
84 {Type: TokenWhitespace, Data: " "},
85 {Type: TokenIdentifier, Data: "$!"},
86 {Type: TokenWhitespace, Data: " "},
87 {Type: TokenIdentifier, Data: "$?"},
88 {Type: parser.TokenDone, Data: ""},
89 },
90 },
91 { // 8
92 "abc=a def[0]=b ghi[$i]=c jkl+=d",
93 []parser.Token{
94 {Type: TokenIdentifierAssign, Data: "abc"},
95 {Type: TokenPunctuator, Data: "="},
96 {Type: TokenWord, Data: "a"},
97 {Type: TokenWhitespace, Data: " "},
98 {Type: TokenIdentifierAssign, Data: "def"},
99 {Type: TokenPunctuator, Data: "["},
100 {Type: TokenNumberLiteral, Data: "0"},
101 {Type: TokenPunctuator, Data: "]"},
102 {Type: TokenPunctuator, Data: "="},
103 {Type: TokenWord, Data: "b"},
104 {Type: TokenWhitespace, Data: " "},
105 {Type: TokenIdentifierAssign, Data: "ghi"},
106 {Type: TokenPunctuator, Data: "["},
107 {Type: TokenIdentifier, Data: "$i"},
108 {Type: TokenPunctuator, Data: "]"},
109 {Type: TokenPunctuator, Data: "="},
110 {Type: TokenWord, Data: "c"},
111 {Type: TokenWhitespace, Data: " "},
112 {Type: TokenIdentifierAssign, Data: "jkl"},
113 {Type: TokenPunctuator, Data: "+="},
114 {Type: TokenWord, Data: "d"},
115 {Type: parser.TokenDone, Data: ""},
116 },
117 },
118 { // 9
119 "ident ${name} ab\\nc=a ${6} a$ ",
120 []parser.Token{
121 {Type: TokenWord, Data: "ident"},
122 {Type: TokenWhitespace, Data: " "},
123 {Type: TokenPunctuator, Data: "${"},
124 {Type: TokenIdentifier, Data: "name"},
125 {Type: TokenPunctuator, Data: "}"},
126 {Type: TokenWhitespace, Data: " "},
127 {Type: TokenWord, Data: "ab\\nc=a"},
128 {Type: TokenWhitespace, Data: " "},
129 {Type: TokenPunctuator, Data: "${"},
130 {Type: TokenNumberLiteral, Data: "6"},
131 {Type: TokenPunctuator, Data: "}"},
132 {Type: TokenWhitespace, Data: " "},
133 {Type: TokenWord, Data: "a$"},
134 {Type: TokenWhitespace, Data: " "},
135 {Type: parser.TokenDone, Data: ""},
136 },
137 },
138 { // 10
139 "$(( 0 1 29 0xff 0xDeAdBeEf 0755 2#5 ))",
140 []parser.Token{
141 {Type: TokenPunctuator, Data: "$(("},
142 {Type: TokenWhitespace, Data: " "},
143 {Type: TokenNumberLiteral, Data: "0"},
144 {Type: TokenWhitespace, Data: " "},
145 {Type: TokenNumberLiteral, Data: "1"},
146 {Type: TokenWhitespace, Data: " "},
147 {Type: TokenNumberLiteral, Data: "29"},
148 {Type: TokenWhitespace, Data: " "},
149 {Type: TokenNumberLiteral, Data: "0xff"},
150 {Type: TokenWhitespace, Data: " "},
151 {Type: TokenNumberLiteral, Data: "0xDeAdBeEf"},
152 {Type: TokenWhitespace, Data: " "},
153 {Type: TokenNumberLiteral, Data: "0755"},
154 {Type: TokenWhitespace, Data: " "},
155 {Type: TokenNumberLiteral, Data: "2#5"},
156 {Type: TokenWhitespace, Data: " "},
157 {Type: TokenPunctuator, Data: "))"},
158 {Type: parser.TokenDone, Data: ""},
159 },
160 },
161 { // 11
162 "\"abc\" \"de\\nf\" \"stuff`command`more stuff\" \"text $ident $another end\" \"text $(command) end - text ${ident} end\" \"with\nnewline\" 'with\nnewline' $\"a string\" $'a \\'string'",
163 []parser.Token{
164 {Type: TokenString, Data: "\"abc\""},
165 {Type: TokenWhitespace, Data: " "},
166 {Type: TokenString, Data: "\"de\\nf\""},
167 {Type: TokenWhitespace, Data: " "},
168 {Type: TokenStringStart, Data: "\"stuff"},
169 {Type: TokenOpenBacktick, Data: "`"},
170 {Type: TokenWord, Data: "command"},
171 {Type: TokenCloseBacktick, Data: "`"},
172 {Type: TokenStringEnd, Data: "more stuff\""},
173 {Type: TokenWhitespace, Data: " "},
174 {Type: TokenStringStart, Data: "\"text "},
175 {Type: TokenIdentifier, Data: "$ident"},
176 {Type: TokenStringMid, Data: " "},
177 {Type: TokenIdentifier, Data: "$another"},
178 {Type: TokenStringEnd, Data: " end\""},
179 {Type: TokenWhitespace, Data: " "},
180 {Type: TokenStringStart, Data: "\"text "},
181 {Type: TokenPunctuator, Data: "$("},
182 {Type: TokenWord, Data: "command"},
183 {Type: TokenPunctuator, Data: ")"},
184 {Type: TokenStringMid, Data: " end - text "},
185 {Type: TokenPunctuator, Data: "${"},
186 {Type: TokenIdentifier, Data: "ident"},
187 {Type: TokenPunctuator, Data: "}"},
188 {Type: TokenStringEnd, Data: " end\""},
189 {Type: TokenWhitespace, Data: " "},
190 {Type: TokenString, Data: "\"with\nnewline\""},
191 {Type: TokenWhitespace, Data: " "},
192 {Type: TokenString, Data: "'with\nnewline'"},
193 {Type: TokenWhitespace, Data: " "},
194 {Type: TokenString, Data: "$\"a string\""},
195 {Type: TokenWhitespace, Data: " "},
196 {Type: TokenString, Data: "$'a \\'string'"},
197 {Type: parser.TokenDone, Data: ""},
198 },
199 },
200 { // 12
201 "< <<< <& <> > >> >& &>> >| | |& || & && {} + = += `` $() $(()) (())",
202 []parser.Token{
203 {Type: TokenPunctuator, Data: "<"},
204 {Type: TokenWhitespace, Data: " "},
205 {Type: TokenPunctuator, Data: "<<<"},
206 {Type: TokenWhitespace, Data: " "},
207 {Type: TokenPunctuator, Data: "<&"},
208 {Type: TokenWhitespace, Data: " "},
209 {Type: TokenPunctuator, Data: "<>"},
210 {Type: TokenWhitespace, Data: " "},
211 {Type: TokenPunctuator, Data: ">"},
212 {Type: TokenWhitespace, Data: " "},
213 {Type: TokenPunctuator, Data: ">>"},
214 {Type: TokenWhitespace, Data: " "},
215 {Type: TokenPunctuator, Data: ">&"},
216 {Type: TokenWhitespace, Data: " "},
217 {Type: TokenPunctuator, Data: "&>>"},
218 {Type: TokenWhitespace, Data: " "},
219 {Type: TokenPunctuator, Data: ">|"},
220 {Type: TokenWhitespace, Data: " "},
221 {Type: TokenPunctuator, Data: "|"},
222 {Type: TokenWhitespace, Data: " "},
223 {Type: TokenPunctuator, Data: "|&"},
224 {Type: TokenWhitespace, Data: " "},
225 {Type: TokenPunctuator, Data: "||"},
226 {Type: TokenWhitespace, Data: " "},
227 {Type: TokenPunctuator, Data: "&"},
228 {Type: TokenWhitespace, Data: " "},
229 {Type: TokenPunctuator, Data: "&&"},
230 {Type: TokenWhitespace, Data: " "},
231 {Type: TokenPunctuator, Data: "{"},
232 {Type: TokenPunctuator, Data: "}"},
233 {Type: TokenWhitespace, Data: " "},
234 {Type: TokenWord, Data: "+"},
235 {Type: TokenWhitespace, Data: " "},
236 {Type: TokenWord, Data: "="},
237 {Type: TokenWhitespace, Data: " "},
238 {Type: TokenWord, Data: "+="},
239 {Type: TokenWhitespace, Data: " "},
240 {Type: TokenOpenBacktick, Data: "`"},
241 {Type: TokenCloseBacktick, Data: "`"},
242 {Type: TokenWhitespace, Data: " "},
243 {Type: TokenPunctuator, Data: "$("},
244 {Type: TokenPunctuator, Data: ")"},
245 {Type: TokenWhitespace, Data: " "},
246 {Type: TokenPunctuator, Data: "$(("},
247 {Type: TokenPunctuator, Data: "))"},
248 {Type: TokenWhitespace, Data: " "},
249 {Type: parser.TokenError, Data: "invalid character"},
250 },
251 },
252 { // 13
253 "$(( + += - -= & &= | |= < <= > >= = == ! != * *= ** / /= % %= ^ ^= ~ ? : , (1) ))",
254 []parser.Token{
255 {Type: TokenPunctuator, Data: "$(("},
256 {Type: TokenWhitespace, Data: " "},
257 {Type: TokenPunctuator, Data: "+"},
258 {Type: TokenWhitespace, Data: " "},
259 {Type: TokenPunctuator, Data: "+="},
260 {Type: TokenWhitespace, Data: " "},
261 {Type: TokenPunctuator, Data: "-"},
262 {Type: TokenWhitespace, Data: " "},
263 {Type: TokenPunctuator, Data: "-="},
264 {Type: TokenWhitespace, Data: " "},
265 {Type: TokenPunctuator, Data: "&"},
266 {Type: TokenWhitespace, Data: " "},
267 {Type: TokenPunctuator, Data: "&="},
268 {Type: TokenWhitespace, Data: " "},
269 {Type: TokenPunctuator, Data: "|"},
270 {Type: TokenWhitespace, Data: " "},
271 {Type: TokenPunctuator, Data: "|="},
272 {Type: TokenWhitespace, Data: " "},
273 {Type: TokenPunctuator, Data: "<"},
274 {Type: TokenWhitespace, Data: " "},
275 {Type: TokenPunctuator, Data: "<="},
276 {Type: TokenWhitespace, Data: " "},
277 {Type: TokenPunctuator, Data: ">"},
278 {Type: TokenWhitespace, Data: " "},
279 {Type: TokenPunctuator, Data: ">="},
280 {Type: TokenWhitespace, Data: " "},
281 {Type: TokenPunctuator, Data: "="},
282 {Type: TokenWhitespace, Data: " "},
283 {Type: TokenPunctuator, Data: "=="},
284 {Type: TokenWhitespace, Data: " "},
285 {Type: TokenPunctuator, Data: "!"},
286 {Type: TokenWhitespace, Data: " "},
287 {Type: TokenPunctuator, Data: "!="},
288 {Type: TokenWhitespace, Data: " "},
289 {Type: TokenPunctuator, Data: "*"},
290 {Type: TokenWhitespace, Data: " "},
291 {Type: TokenPunctuator, Data: "*="},
292 {Type: TokenWhitespace, Data: " "},
293 {Type: TokenPunctuator, Data: "**"},
294 {Type: TokenWhitespace, Data: " "},
295 {Type: TokenPunctuator, Data: "/"},
296 {Type: TokenWhitespace, Data: " "},
297 {Type: TokenPunctuator, Data: "/="},
298 {Type: TokenWhitespace, Data: " "},
299 {Type: TokenPunctuator, Data: "%"},
300 {Type: TokenWhitespace, Data: " "},
301 {Type: TokenPunctuator, Data: "%="},
302 {Type: TokenWhitespace, Data: " "},
303 {Type: TokenPunctuator, Data: "^"},
304 {Type: TokenWhitespace, Data: " "},
305 {Type: TokenPunctuator, Data: "^="},
306 {Type: TokenWhitespace, Data: " "},
307 {Type: TokenPunctuator, Data: "~"},
308 {Type: TokenWhitespace, Data: " "},
309 {Type: TokenPunctuator, Data: "?"},
310 {Type: TokenWhitespace, Data: " "},
311 {Type: TokenPunctuator, Data: ":"},
312 {Type: TokenWhitespace, Data: " "},
313 {Type: TokenPunctuator, Data: ","},
314 {Type: TokenWhitespace, Data: " "},
315 {Type: TokenPunctuator, Data: "("},
316 {Type: TokenNumberLiteral, Data: "1"},
317 {Type: TokenPunctuator, Data: ")"},
318 {Type: TokenWhitespace, Data: " "},
319 {Type: TokenPunctuator, Data: "))"},
320 {Type: parser.TokenDone, Data: ""},
321 },
322 },
323 { // 14
324 "$(( a+b 1+2))",
325 []parser.Token{
326 {Type: TokenPunctuator, Data: "$(("},
327 {Type: TokenWhitespace, Data: " "},
328 {Type: TokenWord, Data: "a"},
329 {Type: TokenPunctuator, Data: "+"},
330 {Type: TokenWord, Data: "b"},
331 {Type: TokenWhitespace, Data: " "},
332 {Type: TokenNumberLiteral, Data: "1"},
333 {Type: TokenPunctuator, Data: "+"},
334 {Type: TokenNumberLiteral, Data: "2"},
335 {Type: TokenPunctuator, Data: "))"},
336 {Type: parser.TokenDone, Data: ""},
337 },
338 },
339 { // 15
340 "(( a+b 1+2))",
341 []parser.Token{
342 {Type: TokenPunctuator, Data: "(("},
343 {Type: TokenWhitespace, Data: " "},
344 {Type: TokenWord, Data: "a"},
345 {Type: TokenPunctuator, Data: "+"},
346 {Type: TokenWord, Data: "b"},
347 {Type: TokenWhitespace, Data: " "},
348 {Type: TokenNumberLiteral, Data: "1"},
349 {Type: TokenPunctuator, Data: "+"},
350 {Type: TokenNumberLiteral, Data: "2"},
351 {Type: TokenPunctuator, Data: "))"},
352 {Type: parser.TokenDone, Data: ""},
353 },
354 },
355 { // 16
356 "$(( ( ))",
357 []parser.Token{
358 {Type: TokenPunctuator, Data: "$(("},
359 {Type: TokenWhitespace, Data: " "},
360 {Type: TokenPunctuator, Data: "("},
361 {Type: TokenWhitespace, Data: " "},
362 {Type: TokenPunctuator, Data: ")"},
363 {Type: parser.TokenError, Data: "invalid character"},
364 },
365 },
366 { // 17
367 "$(( ? ))",
368 []parser.Token{
369 {Type: TokenPunctuator, Data: "$(("},
370 {Type: TokenWhitespace, Data: " "},
371 {Type: TokenPunctuator, Data: "?"},
372 {Type: TokenWhitespace, Data: " "},
373 {Type: parser.TokenError, Data: "invalid character"},
374 },
375 },
376 { // 18
377 "$(( ] ))",
378 []parser.Token{
379 {Type: TokenPunctuator, Data: "$(("},
380 {Type: TokenWhitespace, Data: " "},
381 {Type: parser.TokenError, Data: "invalid character"},
382 },
383 },
384 { // 19
385 "{ )",
386 []parser.Token{
387 {Type: TokenPunctuator, Data: "{"},
388 {Type: TokenWhitespace, Data: " "},
389 {Type: parser.TokenError, Data: "invalid character"},
390 },
391 },
392 { // 20
393 "(",
394 []parser.Token{
395 {Type: TokenPunctuator, Data: "("},
396 {Type: parser.TokenError, Data: "unexpected EOF"},
397 },
398 },
399 { // 21
400 "a b(",
401 []parser.Token{
402 {Type: TokenWord, Data: "a"},
403 {Type: TokenWhitespace, Data: " "},
404 {Type: TokenWord, Data: "b"},
405 {Type: parser.TokenError, Data: "invalid character"},
406 },
407 },
408 { // 22
409 "$(",
410 []parser.Token{
411 {Type: TokenPunctuator, Data: "$("},
412 {Type: parser.TokenError, Data: "unexpected EOF"},
413 },
414 },
415 { // 23
416 "$(}",
417 []parser.Token{
418 {Type: TokenPunctuator, Data: "$("},
419 {Type: TokenPunctuator, Data: "}"},
420 {Type: parser.TokenError, Data: "unexpected EOF"},
421 },
422 },
423 { // 24
424 "<<abc\n123\n456\nabc",
425 []parser.Token{
426 {Type: TokenPunctuator, Data: "<<"},
427 {Type: TokenWord, Data: "abc"},
428 {Type: TokenLineTerminator, Data: "\n"},
429 {Type: TokenHeredoc, Data: "123\n456\n"},
430 {Type: TokenHeredocEnd, Data: "abc"},
431 {Type: parser.TokenDone, Data: ""},
432 },
433 },
434 { // 25
435 "<< abc\n123\n456\nabc",
436 []parser.Token{
437 {Type: TokenPunctuator, Data: "<<"},
438 {Type: TokenWhitespace, Data: " "},
439 {Type: TokenWord, Data: "abc"},
440 {Type: TokenLineTerminator, Data: "\n"},
441 {Type: TokenHeredoc, Data: "123\n456\n"},
442 {Type: TokenHeredocEnd, Data: "abc"},
443 {Type: parser.TokenDone, Data: ""},
444 },
445 },
446 { // 26
447 "<<-abc\n123\n456\nabc",
448 []parser.Token{
449 {Type: TokenPunctuator, Data: "<<-"},
450 {Type: TokenWord, Data: "abc"},
451 {Type: TokenLineTerminator, Data: "\n"},
452 {Type: TokenHeredoc, Data: "123\n456\n"},
453 {Type: TokenHeredocEnd, Data: "abc"},
454 {Type: parser.TokenDone, Data: ""},
455 },
456 },
457 { // 27
458 "<<-abc\n\t123\n\t\t456\n\t\t\tabc",
459 []parser.Token{
460 {Type: TokenPunctuator, Data: "<<-"},
461 {Type: TokenWord, Data: "abc"},
462 {Type: TokenLineTerminator, Data: "\n"},
463 {Type: TokenHeredocIndent, Data: "\t"},
464 {Type: TokenHeredoc, Data: "123\n"},
465 {Type: TokenHeredocIndent, Data: "\t\t"},
466 {Type: TokenHeredoc, Data: "456\n"},
467 {Type: TokenHeredocIndent, Data: "\t\t\t"},
468 {Type: TokenHeredocEnd, Data: "abc"},
469 {Type: parser.TokenDone, Data: ""},
470 },
471 },
472 { // 28
473 "<<a'b 'c\n123\n456\nab c\n",
474 []parser.Token{
475 {Type: TokenPunctuator, Data: "<<"},
476 {Type: TokenWord, Data: "a'b 'c"},
477 {Type: TokenLineTerminator, Data: "\n"},
478 {Type: TokenHeredoc, Data: "123\n456\n"},
479 {Type: TokenHeredocEnd, Data: "ab c"},
480 {Type: TokenLineTerminator, Data: "\n"},
481 {Type: parser.TokenDone, Data: ""},
482 },
483 },
484 { // 29
485 "<<def\n123\n456\ndef\nabc",
486 []parser.Token{
487 {Type: TokenPunctuator, Data: "<<"},
488 {Type: TokenWord, Data: "def"},
489 {Type: TokenLineTerminator, Data: "\n"},
490 {Type: TokenHeredoc, Data: "123\n456\n"},
491 {Type: TokenHeredocEnd, Data: "def"},
492 {Type: TokenLineTerminator, Data: "\n"},
493 {Type: TokenWord, Data: "abc"},
494 {Type: parser.TokenDone, Data: ""},
495 },
496 },
497 { // 30
498 "<<def cat\n123\n456\ndef\nabc",
499 []parser.Token{
500 {Type: TokenPunctuator, Data: "<<"},
501 {Type: TokenWord, Data: "def"},
502 {Type: TokenWhitespace, Data: " "},
503 {Type: TokenWord, Data: "cat"},
504 {Type: TokenLineTerminator, Data: "\n"},
505 {Type: TokenHeredoc, Data: "123\n456\n"},
506 {Type: TokenHeredocEnd, Data: "def"},
507 {Type: TokenLineTerminator, Data: "\n"},
508 {Type: TokenWord, Data: "abc"},
509 {Type: parser.TokenDone, Data: ""},
510 },
511 },
512 { // 31
513 "<<abc cat;<<def cat\n123\nabc\n456\ndef",
514 []parser.Token{
515 {Type: TokenPunctuator, Data: "<<"},
516 {Type: TokenWord, Data: "abc"},
517 {Type: TokenWhitespace, Data: " "},
518 {Type: TokenWord, Data: "cat"},
519 {Type: TokenPunctuator, Data: ";"},
520 {Type: TokenPunctuator, Data: "<<"},
521 {Type: TokenWord, Data: "def"},
522 {Type: TokenWhitespace, Data: " "},
523 {Type: TokenWord, Data: "cat"},
524 {Type: TokenLineTerminator, Data: "\n"},
525 {Type: TokenHeredoc, Data: "123\n"},
526 {Type: TokenHeredocEnd, Data: "abc"},
527 {Type: TokenLineTerminator, Data: "\n"},
528 {Type: TokenHeredoc, Data: "456\n"},
529 {Type: TokenHeredocEnd, Data: "def"},
530 {Type: parser.TokenDone, Data: ""},
531 },
532 },
533 { // 32
534 "<<abc cat;echo $(<<def cat\n456\ndef\n)\n123\nabc",
535 []parser.Token{
536 {Type: TokenPunctuator, Data: "<<"},
537 {Type: TokenWord, Data: "abc"},
538 {Type: TokenWhitespace, Data: " "},
539 {Type: TokenWord, Data: "cat"},
540 {Type: TokenPunctuator, Data: ";"},
541 {Type: TokenWord, Data: "echo"},
542 {Type: TokenWhitespace, Data: " "},
543 {Type: TokenPunctuator, Data: "$("},
544 {Type: TokenPunctuator, Data: "<<"},
545 {Type: TokenWord, Data: "def"},
546 {Type: TokenWhitespace, Data: " "},
547 {Type: TokenWord, Data: "cat"},
548 {Type: TokenLineTerminator, Data: "\n"},
549 {Type: TokenHeredoc, Data: "456\n"},
550 {Type: TokenHeredocEnd, Data: "def"},
551 {Type: TokenLineTerminator, Data: "\n"},
552 {Type: TokenPunctuator, Data: ")"},
553 {Type: TokenLineTerminator, Data: "\n"},
554 {Type: TokenHeredoc, Data: "123\n"},
555 {Type: TokenHeredocEnd, Data: "abc"},
556 {Type: parser.TokenDone, Data: ""},
557 },
558 },
559 { // 33
560 "<<abc\na$abc\nabc",
561 []parser.Token{
562 {Type: TokenPunctuator, Data: "<<"},
563 {Type: TokenWord, Data: "abc"},
564 {Type: TokenLineTerminator, Data: "\n"},
565 {Type: TokenHeredoc, Data: "a"},
566 {Type: TokenIdentifier, Data: "$abc"},
567 {Type: TokenHeredoc, Data: "\n"},
568 {Type: TokenHeredocEnd, Data: "abc"},
569 {Type: parser.TokenDone, Data: ""},
570 },
571 },
572 { // 34
573 "<<'abc'\na$abc\nabc",
574 []parser.Token{
575 {Type: TokenPunctuator, Data: "<<"},
576 {Type: TokenWord, Data: "'abc'"},
577 {Type: TokenLineTerminator, Data: "\n"},
578 {Type: TokenHeredoc, Data: "a$abc\n"},
579 {Type: TokenHeredocEnd, Data: "abc"},
580 {Type: parser.TokenDone, Data: ""},
581 },
582 },
583 { // 35
584 "<<\"\"abc\na$abc\nabc",
585 []parser.Token{
586 {Type: TokenPunctuator, Data: "<<"},
587 {Type: TokenWord, Data: "\"\"abc"},
588 {Type: TokenLineTerminator, Data: "\n"},
589 {Type: TokenHeredoc, Data: "a$abc\n"},
590 {Type: TokenHeredocEnd, Data: "abc"},
591 {Type: parser.TokenDone, Data: ""},
592 },
593 },
594 { // 36
595 "<<a\\ b\\ c\na$abc\na b c",
596 []parser.Token{
597 {Type: TokenPunctuator, Data: "<<"},
598 {Type: TokenWord, Data: "a\\ b\\ c"},
599 {Type: TokenLineTerminator, Data: "\n"},
600 {Type: TokenHeredoc, Data: "a$abc\n"},
601 {Type: TokenHeredocEnd, Data: "a b c"},
602 {Type: parser.TokenDone, Data: ""},
603 },
604 },
605 { // 37
606 "<<abc\na${abc} $99\nabc",
607 []parser.Token{
608 {Type: TokenPunctuator, Data: "<<"},
609 {Type: TokenWord, Data: "abc"},
610 {Type: TokenLineTerminator, Data: "\n"},
611 {Type: TokenHeredoc, Data: "a"},
612 {Type: TokenPunctuator, Data: "${"},
613 {Type: TokenIdentifier, Data: "abc"},
614 {Type: TokenPunctuator, Data: "}"},
615 {Type: TokenHeredoc, Data: " "},
616 {Type: TokenIdentifier, Data: "$9"},
617 {Type: TokenHeredoc, Data: "9\n"},
618 {Type: TokenHeredocEnd, Data: "abc"},
619 {Type: parser.TokenDone, Data: ""},
620 },
621 },
622 { // 38
623 "<<abc\na$(\necho abc;\n) 1\nabc",
624 []parser.Token{
625 {Type: TokenPunctuator, Data: "<<"},
626 {Type: TokenWord, Data: "abc"},
627 {Type: TokenLineTerminator, Data: "\n"},
628 {Type: TokenHeredoc, Data: "a"},
629 {Type: TokenPunctuator, Data: "$("},
630 {Type: TokenLineTerminator, Data: "\n"},
631 {Type: TokenWord, Data: "echo"},
632 {Type: TokenWhitespace, Data: " "},
633 {Type: TokenWord, Data: "abc"},
634 {Type: TokenPunctuator, Data: ";"},
635 {Type: TokenLineTerminator, Data: "\n"},
636 {Type: TokenPunctuator, Data: ")"},
637 {Type: TokenHeredoc, Data: " 1\n"},
638 {Type: TokenHeredocEnd, Data: "abc"},
639 {Type: parser.TokenDone, Data: ""},
640 },
641 },
642 { // 39
643 "<<abc\n$a\nabc",
644 []parser.Token{
645 {Type: TokenPunctuator, Data: "<<"},
646 {Type: TokenWord, Data: "abc"},
647 {Type: TokenLineTerminator, Data: "\n"},
648 {Type: TokenIdentifier, Data: "$a"},
649 {Type: TokenHeredoc, Data: "\n"},
650 {Type: TokenHeredocEnd, Data: "abc"},
651 {Type: parser.TokenDone, Data: ""},
652 },
653 },
654 { // 40
655 "<<abc\n$$\nabc",
656 []parser.Token{
657 {Type: TokenPunctuator, Data: "<<"},
658 {Type: TokenWord, Data: "abc"},
659 {Type: TokenLineTerminator, Data: "\n"},
660 {Type: TokenIdentifier, Data: "$$"},
661 {Type: TokenHeredoc, Data: "\n"},
662 {Type: TokenHeredocEnd, Data: "abc"},
663 {Type: parser.TokenDone, Data: ""},
664 },
665 },
666 { // 41
667 "<<abc\n$!\nabc",
668 []parser.Token{
669 {Type: TokenPunctuator, Data: "<<"},
670 {Type: TokenWord, Data: "abc"},
671 {Type: TokenLineTerminator, Data: "\n"},
672 {Type: TokenIdentifier, Data: "$!"},
673 {Type: TokenHeredoc, Data: "\n"},
674 {Type: TokenHeredocEnd, Data: "abc"},
675 {Type: parser.TokenDone, Data: ""},
676 },
677 },
678 { // 42
679 "<<abc\n$?\nabc",
680 []parser.Token{
681 {Type: TokenPunctuator, Data: "<<"},
682 {Type: TokenWord, Data: "abc"},
683 {Type: TokenLineTerminator, Data: "\n"},
684 {Type: TokenIdentifier, Data: "$?"},
685 {Type: TokenHeredoc, Data: "\n"},
686 {Type: TokenHeredocEnd, Data: "abc"},
687 {Type: parser.TokenDone, Data: ""},
688 },
689 },
690 { // 43
691 "<<abc\nabc",
692 []parser.Token{
693 {Type: TokenPunctuator, Data: "<<"},
694 {Type: TokenWord, Data: "abc"},
695 {Type: TokenLineTerminator, Data: "\n"},
696 {Type: TokenHeredocEnd, Data: "abc"},
697 {Type: parser.TokenDone, Data: ""},
698 },
699 },
700 { // 44
701 "<<abc\na$(<<def) 1\nabc",
702 []parser.Token{
703 {Type: TokenPunctuator, Data: "<<"},
704 {Type: TokenWord, Data: "abc"},
705 {Type: TokenLineTerminator, Data: "\n"},
706 {Type: TokenHeredoc, Data: "a"},
707 {Type: TokenPunctuator, Data: "$("},
708 {Type: TokenPunctuator, Data: "<<"},
709 {Type: TokenWord, Data: "def"},
710 {Type: parser.TokenError, Data: "invalid character"},
711 },
712 },
713 { // 45
714 "<<abc\na$(<<def cat) 1\nabc",
715 []parser.Token{
716 {Type: TokenPunctuator, Data: "<<"},
717 {Type: TokenWord, Data: "abc"},
718 {Type: TokenLineTerminator, Data: "\n"},
719 {Type: TokenHeredoc, Data: "a"},
720 {Type: TokenPunctuator, Data: "$("},
721 {Type: TokenPunctuator, Data: "<<"},
722 {Type: TokenWord, Data: "def"},
723 {Type: TokenWhitespace, Data: " "},
724 {Type: TokenWord, Data: "cat"},
725 {Type: parser.TokenError, Data: "invalid character"},
726 },
727 },
728 { // 46
729 "<<abc;$(<<def cat)\nabc\ndef\nabc",
730 []parser.Token{
731 {Type: TokenPunctuator, Data: "<<"},
732 {Type: TokenWord, Data: "abc"},
733 {Type: TokenPunctuator, Data: ";"},
734 {Type: TokenPunctuator, Data: "$("},
735 {Type: TokenPunctuator, Data: "<<"},
736 {Type: TokenWord, Data: "def"},
737 {Type: TokenWhitespace, Data: " "},
738 {Type: TokenWord, Data: "cat"},
739 {Type: parser.TokenError, Data: "invalid character"},
740 },
741 },
742 { // 47
743 "<<abc;<<def;$(<<ghi;<<jkl\nghi\njkl\n)\nabc\ndef",
744 []parser.Token{
745 {Type: TokenPunctuator, Data: "<<"},
746 {Type: TokenWord, Data: "abc"},
747 {Type: TokenPunctuator, Data: ";"},
748 {Type: TokenPunctuator, Data: "<<"},
749 {Type: TokenWord, Data: "def"},
750 {Type: TokenPunctuator, Data: ";"},
751 {Type: TokenPunctuator, Data: "$("},
752 {Type: TokenPunctuator, Data: "<<"},
753 {Type: TokenWord, Data: "ghi"},
754 {Type: TokenPunctuator, Data: ";"},
755 {Type: TokenPunctuator, Data: "<<"},
756 {Type: TokenWord, Data: "jkl"},
757 {Type: TokenLineTerminator, Data: "\n"},
758 {Type: TokenHeredocEnd, Data: "ghi"},
759 {Type: TokenLineTerminator, Data: "\n"},
760 {Type: TokenHeredocEnd, Data: "jkl"},
761 {Type: TokenLineTerminator, Data: "\n"},
762 {Type: TokenPunctuator, Data: ")"},
763 {Type: TokenLineTerminator, Data: "\n"},
764 {Type: TokenHeredocEnd, Data: "abc"},
765 {Type: TokenLineTerminator, Data: "\n"},
766 {Type: TokenHeredocEnd, Data: "def"},
767 {Type: parser.TokenDone, Data: ""},
768 },
769 },
770 { // 48
771 "<<a\\\nbc\nabc\ndef\na\nbc",
772 []parser.Token{
773 {Type: TokenPunctuator, Data: "<<"},
774 {Type: TokenWord, Data: "a\\\nbc"},
775 {Type: TokenLineTerminator, Data: "\n"},
776 {Type: TokenHeredoc, Data: "abc\ndef\n"},
777 {Type: TokenHeredocEnd, Data: "a\nbc"},
778 {Type: parser.TokenDone, Data: ""},
779 },
780 },
781 { // 49
782 "<<a;echo ${a/b/\n$c #not-a-comment $d}\n123\na",
783 []parser.Token{
784 {Type: TokenPunctuator, Data: "<<"},
785 {Type: TokenWord, Data: "a"},
786 {Type: TokenPunctuator, Data: ";"},
787 {Type: TokenWord, Data: "echo"},
788 {Type: TokenWhitespace, Data: " "},
789 {Type: TokenPunctuator, Data: "${"},
790 {Type: TokenIdentifier, Data: "a"},
791 {Type: TokenPunctuator, Data: "/"},
792 {Type: TokenPattern, Data: "b"},
793 {Type: TokenPunctuator, Data: "/"},
794 {Type: TokenLineTerminator, Data: "\n"},
795 {Type: TokenIdentifier, Data: "$c"},
796 {Type: TokenWhitespace, Data: " "},
797 {Type: TokenWord, Data: "#not-a-comment"},
798 {Type: TokenWhitespace, Data: " "},
799 {Type: TokenIdentifier, Data: "$d"},
800 {Type: TokenPunctuator, Data: "}"},
801 {Type: TokenLineTerminator, Data: "\n"},
802 {Type: TokenHeredoc, Data: "123\n"},
803 {Type: TokenHeredocEnd, Data: "a"},
804 {Type: parser.TokenDone, Data: ""},
805 },
806 },
807 { // 50
808 "2>1 word",
809 []parser.Token{
810 {Type: TokenNumberLiteral, Data: "2"},
811 {Type: TokenPunctuator, Data: ">"},
812 {Type: TokenWord, Data: "1"},
813 {Type: TokenWhitespace, Data: " "},
814 {Type: TokenWord, Data: "word"},
815 {Type: parser.TokenDone, Data: ""},
816 },
817 },
818 { // 51
819 "time -p cmd",
820 []parser.Token{
821 {Type: TokenKeyword, Data: "time"},
822 {Type: TokenWhitespace, Data: " "},
823 {Type: TokenWord, Data: "-p"},
824 {Type: TokenWhitespace, Data: " "},
825 {Type: TokenWord, Data: "cmd"},
826 {Type: parser.TokenDone, Data: ""},
827 },
828 },
829 { // 52
830 "time -p cmd if",
831 []parser.Token{
832 {Type: TokenKeyword, Data: "time"},
833 {Type: TokenWhitespace, Data: " "},
834 {Type: TokenWord, Data: "-p"},
835 {Type: TokenWhitespace, Data: " "},
836 {Type: TokenWord, Data: "cmd"},
837 {Type: TokenWhitespace, Data: " "},
838 {Type: TokenWord, Data: "if"},
839 {Type: parser.TokenDone, Data: ""},
840 },
841 },
842 { // 53
843 "time -p if a;then b;fi",
844 []parser.Token{
845 {Type: TokenKeyword, Data: "time"},
846 {Type: TokenWhitespace, Data: " "},
847 {Type: TokenWord, Data: "-p"},
848 {Type: TokenWhitespace, Data: " "},
849 {Type: TokenKeyword, Data: "if"},
850 {Type: TokenWhitespace, Data: " "},
851 {Type: TokenWord, Data: "a"},
852 {Type: TokenPunctuator, Data: ";"},
853 {Type: TokenKeyword, Data: "then"},
854 {Type: TokenWhitespace, Data: " "},
855 {Type: TokenWord, Data: "b"},
856 {Type: TokenPunctuator, Data: ";"},
857 {Type: TokenKeyword, Data: "fi"},
858 {Type: parser.TokenDone, Data: ""},
859 },
860 },
861 { // 54
862 "{a..b..2} {a,b,d} a{b,c,d}e a{1..4} {2..10..-1} {-1..-100..5} {a..z..-1}",
863 []parser.Token{
864 {Type: TokenBraceExpansion, Data: "{a..b..2}"},
865 {Type: TokenWhitespace, Data: " "},
866 {Type: TokenBraceExpansion, Data: "{a,b,d}"},
867 {Type: TokenWhitespace, Data: " "},
868 {Type: TokenWord, Data: "a"},
869 {Type: TokenBraceExpansion, Data: "{b,c,d}"},
870 {Type: TokenWord, Data: "e"},
871 {Type: TokenWhitespace, Data: " "},
872 {Type: TokenWord, Data: "a"},
873 {Type: TokenBraceExpansion, Data: "{1..4}"},
874 {Type: TokenWhitespace, Data: " "},
875 {Type: TokenBraceExpansion, Data: "{2..10..-1}"},
876 {Type: TokenWhitespace, Data: " "},
877 {Type: TokenBraceExpansion, Data: "{-1..-100..5}"},
878 {Type: TokenWhitespace, Data: " "},
879 {Type: TokenBraceExpansion, Data: "{a..z..-1}"},
880 {Type: parser.TokenDone, Data: ""},
881 },
882 },
883 { // 55
884 "{a..2}",
885 []parser.Token{
886 {Type: TokenWord, Data: "{a..2}"},
887 {Type: parser.TokenDone, Data: ""},
888 },
889 },
890 { // 56
891 "{a..b..c}",
892 []parser.Token{
893 {Type: TokenWord, Data: "{a..b..c}"},
894 {Type: parser.TokenDone, Data: ""},
895 },
896 },
897 { // 57
898 "{a..b2}",
899 []parser.Token{
900 {Type: TokenWord, Data: "{a..b2}"},
901 {Type: parser.TokenDone, Data: ""},
902 },
903 },
904 { // 58
905 "{_a,_b,_c}",
906 []parser.Token{
907 {Type: TokenBraceExpansion, Data: "{_a,_b,_c}"},
908 {Type: parser.TokenDone, Data: ""},
909 },
910 },
911 { // 59
912 "{1,2,3}",
913 []parser.Token{
914 {Type: TokenBraceExpansion, Data: "{1,2,3}"},
915 {Type: parser.TokenDone, Data: ""},
916 },
917 },
918 { // 60
919 "{1..}",
920 []parser.Token{
921 {Type: TokenWord, Data: "{1..}"},
922 {Type: parser.TokenDone, Data: ""},
923 },
924 },
925 { // 61
926 "{1..3..a}",
927 []parser.Token{
928 {Type: TokenWord, Data: "{1..3..a}"},
929 {Type: parser.TokenDone, Data: ""},
930 },
931 },
932 { // 62
933 "{1..3..1a}",
934 []parser.Token{
935 {Type: TokenWord, Data: "{1..3..1a}"},
936 {Type: parser.TokenDone, Data: ""},
937 },
938 },
939 { // 63
940 "{-",
941 []parser.Token{
942 {Type: TokenWord, Data: "{-"},
943 {Type: parser.TokenDone, Data: ""},
944 },
945 },
946 { // 64
947 "{-,_}",
948 []parser.Token{
949 {Type: TokenBraceExpansion, Data: "{-,_}"},
950 {Type: parser.TokenDone, Data: ""},
951 },
952 },
953 { // 65
954 "{-\\n}",
955 []parser.Token{
956 {Type: TokenWord, Data: "{-\\n}"},
957 {Type: parser.TokenDone, Data: ""},
958 },
959 },
960 { // 66
961 "{-,\\n}",
962 []parser.Token{
963 {Type: TokenBraceExpansion, Data: "{-,\\n}"},
964 {Type: parser.TokenDone, Data: ""},
965 },
966 },
967 { // 67
968 "a={123",
969 []parser.Token{
970 {Type: TokenIdentifierAssign, Data: "a"},
971 {Type: TokenPunctuator, Data: "="},
972 {Type: TokenWord, Data: "{123"},
973 {Type: parser.TokenDone, Data: ""},
974 },
975 },
976 { // 68
977 "word{ word{a} word{\nword{",
978 []parser.Token{
979 {Type: TokenWord, Data: "word{"},
980 {Type: TokenWhitespace, Data: " "},
981 {Type: TokenWord, Data: "word{a}"},
982 {Type: TokenWhitespace, Data: " "},
983 {Type: TokenWord, Data: "word{"},
984 {Type: TokenLineTerminator, Data: "\n"},
985 {Type: TokenWord, Data: "word{"},
986 {Type: parser.TokenDone, Data: ""},
987 },
988 },
989 { // 69
990 "{ echo 123; echo 456; }",
991 []parser.Token{
992 {Type: TokenPunctuator, Data: "{"},
993 {Type: TokenWhitespace, Data: " "},
994 {Type: TokenWord, Data: "echo"},
995 {Type: TokenWhitespace, Data: " "},
996 {Type: TokenWord, Data: "123"},
997 {Type: TokenPunctuator, Data: ";"},
998 {Type: TokenWhitespace, Data: " "},
999 {Type: TokenWord, Data: "echo"},
1000 {Type: TokenWhitespace, Data: " "},
1001 {Type: TokenWord, Data: "456"},
1002 {Type: TokenPunctuator, Data: ";"},
1003 {Type: TokenWhitespace, Data: " "},
1004 {Type: TokenPunctuator, Data: "}"},
1005 {Type: parser.TokenDone, Data: ""},
1006 },
1007 },
1008 { // 70
1009 "(echo 123; echo 456)",
1010 []parser.Token{
1011 {Type: TokenPunctuator, Data: "("},
1012 {Type: TokenWord, Data: "echo"},
1013 {Type: TokenWhitespace, Data: " "},
1014 {Type: TokenWord, Data: "123"},
1015 {Type: TokenPunctuator, Data: ";"},
1016 {Type: TokenWhitespace, Data: " "},
1017 {Type: TokenWord, Data: "echo"},
1018 {Type: TokenWhitespace, Data: " "},
1019 {Type: TokenWord, Data: "456"},
1020 {Type: TokenPunctuator, Data: ")"},
1021 {Type: parser.TokenDone, Data: ""},
1022 },
1023 },
1024 { // 71
1025 "`a` `echo \\`abc\\`` echo \"a`echo \"1\\`echo u\\\\\\`echo 123\\\\\\`v\\`3\"`c\"",
1026 []parser.Token{
1027 {Type: TokenOpenBacktick, Data: "`"},
1028 {Type: TokenWord, Data: "a"},
1029 {Type: TokenCloseBacktick, Data: "`"},
1030 {Type: TokenWhitespace, Data: " "},
1031 {Type: TokenOpenBacktick, Data: "`"},
1032 {Type: TokenWord, Data: "echo"},
1033 {Type: TokenWhitespace, Data: " "},
1034 {Type: TokenOpenBacktick, Data: "\\`"},
1035 {Type: TokenWord, Data: "abc"},
1036 {Type: TokenCloseBacktick, Data: "\\`"},
1037 {Type: TokenCloseBacktick, Data: "`"},
1038 {Type: TokenWhitespace, Data: " "},
1039 {Type: TokenWord, Data: "echo"},
1040 {Type: TokenWhitespace, Data: " "},
1041 {Type: TokenStringStart, Data: "\"a"},
1042 {Type: TokenOpenBacktick, Data: "`"},
1043 {Type: TokenWord, Data: "echo"},
1044 {Type: TokenWhitespace, Data: " "},
1045 {Type: TokenStringStart, Data: "\"1"},
1046 {Type: TokenOpenBacktick, Data: "\\`"},
1047 {Type: TokenWord, Data: "echo"},
1048 {Type: TokenWhitespace, Data: " "},
1049 {Type: TokenWord, Data: "u"},
1050 {Type: TokenOpenBacktick, Data: "\\\\\\`"},
1051 {Type: TokenWord, Data: "echo"},
1052 {Type: TokenWhitespace, Data: " "},
1053 {Type: TokenWord, Data: "123"},
1054 {Type: TokenCloseBacktick, Data: "\\\\\\`"},
1055 {Type: TokenWord, Data: "v"},
1056 {Type: TokenCloseBacktick, Data: "\\`"},
1057 {Type: TokenStringEnd, Data: "3\""},
1058 {Type: TokenCloseBacktick, Data: "`"},
1059 {Type: TokenStringEnd, Data: "c\""},
1060 {Type: parser.TokenDone, Data: ""},
1061 },
1062 },
1063 { // 72
1064 "`\\``",
1065 []parser.Token{
1066 {Type: TokenOpenBacktick, Data: "`"},
1067 {Type: TokenOpenBacktick, Data: "\\`"},
1068 {Type: parser.TokenError, Data: "incorrect backtick depth"},
1069 },
1070 },
1071 { // 73
1072 "`\\`\\\\\\``",
1073 []parser.Token{
1074 {Type: TokenOpenBacktick, Data: "`"},
1075 {Type: TokenOpenBacktick, Data: "\\`"},
1076 {Type: TokenOpenBacktick, Data: "\\\\\\`"},
1077 {Type: parser.TokenError, Data: "incorrect backtick depth"},
1078 },
1079 },
1080 { // 74
1081 "`\\`\\\\\\`\\`",
1082 []parser.Token{
1083 {Type: TokenOpenBacktick, Data: "`"},
1084 {Type: TokenOpenBacktick, Data: "\\`"},
1085 {Type: TokenOpenBacktick, Data: "\\\\\\`"},
1086 {Type: parser.TokenError, Data: "incorrect backtick depth"},
1087 },
1088 },
1089 { // 75
1090 "`\\$abc`",
1091 []parser.Token{
1092 {Type: TokenOpenBacktick, Data: "`"},
1093 {Type: TokenIdentifier, Data: "\\$abc"},
1094 {Type: TokenCloseBacktick, Data: "`"},
1095 {Type: parser.TokenDone, Data: ""},
1096 },
1097 },
1098 { // 76
1099 "echo `echo \\\"abc\\\"`",
1100 []parser.Token{
1101 {Type: TokenWord, Data: "echo"},
1102 {Type: TokenWhitespace, Data: " "},
1103 {Type: TokenOpenBacktick, Data: "`"},
1104 {Type: TokenWord, Data: "echo"},
1105 {Type: TokenWhitespace, Data: " "},
1106 {Type: TokenWord, Data: "\\\"abc\\\""},
1107 {Type: TokenCloseBacktick, Data: "`"},
1108 {Type: parser.TokenDone, Data: ""},
1109 },
1110 },
1111 { // 77
1112 "\\\"abc\\\"",
1113 []parser.Token{
1114 {Type: TokenWord, Data: "\\\"abc\\\""},
1115 {Type: parser.TokenDone, Data: ""},
1116 },
1117 },
1118 { // 78
1119 "\\",
1120 []parser.Token{
1121 {Type: parser.TokenError, Data: "unexpected EOF"},
1122 },
1123 },
1124 { // 79
1125 "{abc}>2",
1126 []parser.Token{
1127 {Type: TokenBraceWord, Data: "{abc}"},
1128 {Type: TokenPunctuator, Data: ">"},
1129 {Type: TokenWord, Data: "2"},
1130 {Type: parser.TokenDone, Data: ""},
1131 },
1132 },
1133 { // 80
1134 "<&1-",
1135 []parser.Token{
1136 {Type: TokenPunctuator, Data: "<&"},
1137 {Type: TokenWord, Data: "1-"},
1138 {Type: parser.TokenDone, Data: ""},
1139 },
1140 },
1141 { // 81
1142 "<(a)",
1143 []parser.Token{
1144 {Type: TokenPunctuator, Data: "<("},
1145 {Type: TokenWord, Data: "a"},
1146 {Type: TokenPunctuator, Data: ")"},
1147 {Type: parser.TokenDone, Data: ""},
1148 },
1149 },
1150 { // 82
1151 "a >(b)",
1152 []parser.Token{
1153 {Type: TokenWord, Data: "a"},
1154 {Type: TokenWhitespace, Data: " "},
1155 {Type: TokenPunctuator, Data: ">("},
1156 {Type: TokenWord, Data: "b"},
1157 {Type: TokenPunctuator, Data: ")"},
1158 {Type: parser.TokenDone, Data: ""},
1159 },
1160 },
1161 { // 83
1162 ": ${!a} ${!a*} ${!a@} ${!a[@]} ${!a[*]} ${a:1:2} ${a: -1 : -2} ${a:1} ${a:-b} ${a:=b} ${a:?a is unset} ${a:+a is set} ${#a} ${#} ${a#b} ${a##b} ${a%b} ${a%%b} ${a/b/c} ${a//b/c} ${a/#b/c} ${a/%b/c} ${a^b} ${a^^b} ${a,b} ${a,,b} ${a@Q} ${a@a} ${a@P}",
1163 []parser.Token{
1164 {Type: TokenWord, Data: ":"},
1165 {Type: TokenWhitespace, Data: " "},
1166 {Type: TokenPunctuator, Data: "${"},
1167 {Type: TokenPunctuator, Data: "!"},
1168 {Type: TokenIdentifier, Data: "a"},
1169 {Type: TokenPunctuator, Data: "}"},
1170 {Type: TokenWhitespace, Data: " "},
1171 {Type: TokenPunctuator, Data: "${"},
1172 {Type: TokenPunctuator, Data: "!"},
1173 {Type: TokenIdentifier, Data: "a"},
1174 {Type: TokenPunctuator, Data: "*"},
1175 {Type: TokenPunctuator, Data: "}"},
1176 {Type: TokenWhitespace, Data: " "},
1177 {Type: TokenPunctuator, Data: "${"},
1178 {Type: TokenPunctuator, Data: "!"},
1179 {Type: TokenIdentifier, Data: "a"},
1180 {Type: TokenPunctuator, Data: "@"},
1181 {Type: TokenPunctuator, Data: "}"},
1182 {Type: TokenWhitespace, Data: " "},
1183 {Type: TokenPunctuator, Data: "${"},
1184 {Type: TokenPunctuator, Data: "!"},
1185 {Type: TokenIdentifier, Data: "a"},
1186 {Type: TokenPunctuator, Data: "["},
1187 {Type: TokenWord, Data: "@"},
1188 {Type: TokenPunctuator, Data: "]"},
1189 {Type: TokenPunctuator, Data: "}"},
1190 {Type: TokenWhitespace, Data: " "},
1191 {Type: TokenPunctuator, Data: "${"},
1192 {Type: TokenPunctuator, Data: "!"},
1193 {Type: TokenIdentifier, Data: "a"},
1194 {Type: TokenPunctuator, Data: "["},
1195 {Type: TokenWord, Data: "*"},
1196 {Type: TokenPunctuator, Data: "]"},
1197 {Type: TokenPunctuator, Data: "}"},
1198 {Type: TokenWhitespace, Data: " "},
1199 {Type: TokenPunctuator, Data: "${"},
1200 {Type: TokenIdentifier, Data: "a"},
1201 {Type: TokenPunctuator, Data: ":"},
1202 {Type: TokenNumberLiteral, Data: "1"},
1203 {Type: TokenPunctuator, Data: ":"},
1204 {Type: TokenNumberLiteral, Data: "2"},
1205 {Type: TokenPunctuator, Data: "}"},
1206 {Type: TokenWhitespace, Data: " "},
1207 {Type: TokenPunctuator, Data: "${"},
1208 {Type: TokenIdentifier, Data: "a"},
1209 {Type: TokenPunctuator, Data: ":"},
1210 {Type: TokenWhitespace, Data: " "},
1211 {Type: TokenNumberLiteral, Data: "-1"},
1212 {Type: TokenWhitespace, Data: " "},
1213 {Type: TokenPunctuator, Data: ":"},
1214 {Type: TokenWhitespace, Data: " "},
1215 {Type: TokenNumberLiteral, Data: "-2"},
1216 {Type: TokenPunctuator, Data: "}"},
1217 {Type: TokenWhitespace, Data: " "},
1218 {Type: TokenPunctuator, Data: "${"},
1219 {Type: TokenIdentifier, Data: "a"},
1220 {Type: TokenPunctuator, Data: ":"},
1221 {Type: TokenNumberLiteral, Data: "1"},
1222 {Type: TokenPunctuator, Data: "}"},
1223 {Type: TokenWhitespace, Data: " "},
1224 {Type: TokenPunctuator, Data: "${"},
1225 {Type: TokenIdentifier, Data: "a"},
1226 {Type: TokenPunctuator, Data: ":-"},
1227 {Type: TokenWord, Data: "b"},
1228 {Type: TokenPunctuator, Data: "}"},
1229 {Type: TokenWhitespace, Data: " "},
1230 {Type: TokenPunctuator, Data: "${"},
1231 {Type: TokenIdentifier, Data: "a"},
1232 {Type: TokenPunctuator, Data: ":="},
1233 {Type: TokenWord, Data: "b"},
1234 {Type: TokenPunctuator, Data: "}"},
1235 {Type: TokenWhitespace, Data: " "},
1236 {Type: TokenPunctuator, Data: "${"},
1237 {Type: TokenIdentifier, Data: "a"},
1238 {Type: TokenPunctuator, Data: ":?"},
1239 {Type: TokenWord, Data: "a"},
1240 {Type: TokenWhitespace, Data: " "},
1241 {Type: TokenWord, Data: "is"},
1242 {Type: TokenWhitespace, Data: " "},
1243 {Type: TokenWord, Data: "unset"},
1244 {Type: TokenPunctuator, Data: "}"},
1245 {Type: TokenWhitespace, Data: " "},
1246 {Type: TokenPunctuator, Data: "${"},
1247 {Type: TokenIdentifier, Data: "a"},
1248 {Type: TokenPunctuator, Data: ":+"},
1249 {Type: TokenWord, Data: "a"},
1250 {Type: TokenWhitespace, Data: " "},
1251 {Type: TokenWord, Data: "is"},
1252 {Type: TokenWhitespace, Data: " "},
1253 {Type: TokenWord, Data: "set"},
1254 {Type: TokenPunctuator, Data: "}"},
1255 {Type: TokenWhitespace, Data: " "},
1256 {Type: TokenPunctuator, Data: "${"},
1257 {Type: TokenPunctuator, Data: "#"},
1258 {Type: TokenIdentifier, Data: "a"},
1259 {Type: TokenPunctuator, Data: "}"},
1260 {Type: TokenWhitespace, Data: " "},
1261 {Type: TokenPunctuator, Data: "${"},
1262 {Type: TokenKeyword, Data: "#"},
1263 {Type: TokenPunctuator, Data: "}"},
1264 {Type: TokenWhitespace, Data: " "},
1265 {Type: TokenPunctuator, Data: "${"},
1266 {Type: TokenIdentifier, Data: "a"},
1267 {Type: TokenPunctuator, Data: "#"},
1268 {Type: TokenPattern, Data: "b"},
1269 {Type: TokenPunctuator, Data: "}"},
1270 {Type: TokenWhitespace, Data: " "},
1271 {Type: TokenPunctuator, Data: "${"},
1272 {Type: TokenIdentifier, Data: "a"},
1273 {Type: TokenPunctuator, Data: "##"},
1274 {Type: TokenPattern, Data: "b"},
1275 {Type: TokenPunctuator, Data: "}"},
1276 {Type: TokenWhitespace, Data: " "},
1277 {Type: TokenPunctuator, Data: "${"},
1278 {Type: TokenIdentifier, Data: "a"},
1279 {Type: TokenPunctuator, Data: "%"},
1280 {Type: TokenPattern, Data: "b"},
1281 {Type: TokenPunctuator, Data: "}"},
1282 {Type: TokenWhitespace, Data: " "},
1283 {Type: TokenPunctuator, Data: "${"},
1284 {Type: TokenIdentifier, Data: "a"},
1285 {Type: TokenPunctuator, Data: "%%"},
1286 {Type: TokenPattern, Data: "b"},
1287 {Type: TokenPunctuator, Data: "}"},
1288 {Type: TokenWhitespace, Data: " "},
1289 {Type: TokenPunctuator, Data: "${"},
1290 {Type: TokenIdentifier, Data: "a"},
1291 {Type: TokenPunctuator, Data: "/"},
1292 {Type: TokenPattern, Data: "b"},
1293 {Type: TokenPunctuator, Data: "/"},
1294 {Type: TokenWord, Data: "c"},
1295 {Type: TokenPunctuator, Data: "}"},
1296 {Type: TokenWhitespace, Data: " "},
1297 {Type: TokenPunctuator, Data: "${"},
1298 {Type: TokenIdentifier, Data: "a"},
1299 {Type: TokenPunctuator, Data: "//"},
1300 {Type: TokenPattern, Data: "b"},
1301 {Type: TokenPunctuator, Data: "/"},
1302 {Type: TokenWord, Data: "c"},
1303 {Type: TokenPunctuator, Data: "}"},
1304 {Type: TokenWhitespace, Data: " "},
1305 {Type: TokenPunctuator, Data: "${"},
1306 {Type: TokenIdentifier, Data: "a"},
1307 {Type: TokenPunctuator, Data: "/#"},
1308 {Type: TokenPattern, Data: "b"},
1309 {Type: TokenPunctuator, Data: "/"},
1310 {Type: TokenWord, Data: "c"},
1311 {Type: TokenPunctuator, Data: "}"},
1312 {Type: TokenWhitespace, Data: " "},
1313 {Type: TokenPunctuator, Data: "${"},
1314 {Type: TokenIdentifier, Data: "a"},
1315 {Type: TokenPunctuator, Data: "/%"},
1316 {Type: TokenPattern, Data: "b"},
1317 {Type: TokenPunctuator, Data: "/"},
1318 {Type: TokenWord, Data: "c"},
1319 {Type: TokenPunctuator, Data: "}"},
1320 {Type: TokenWhitespace, Data: " "},
1321 {Type: TokenPunctuator, Data: "${"},
1322 {Type: TokenIdentifier, Data: "a"},
1323 {Type: TokenPunctuator, Data: "^"},
1324 {Type: TokenPattern, Data: "b"},
1325 {Type: TokenPunctuator, Data: "}"},
1326 {Type: TokenWhitespace, Data: " "},
1327 {Type: TokenPunctuator, Data: "${"},
1328 {Type: TokenIdentifier, Data: "a"},
1329 {Type: TokenPunctuator, Data: "^^"},
1330 {Type: TokenPattern, Data: "b"},
1331 {Type: TokenPunctuator, Data: "}"},
1332 {Type: TokenWhitespace, Data: " "},
1333 {Type: TokenPunctuator, Data: "${"},
1334 {Type: TokenIdentifier, Data: "a"},
1335 {Type: TokenPunctuator, Data: ","},
1336 {Type: TokenPattern, Data: "b"},
1337 {Type: TokenPunctuator, Data: "}"},
1338 {Type: TokenWhitespace, Data: " "},
1339 {Type: TokenPunctuator, Data: "${"},
1340 {Type: TokenIdentifier, Data: "a"},
1341 {Type: TokenPunctuator, Data: ",,"},
1342 {Type: TokenPattern, Data: "b"},
1343 {Type: TokenPunctuator, Data: "}"},
1344 {Type: TokenWhitespace, Data: " "},
1345 {Type: TokenPunctuator, Data: "${"},
1346 {Type: TokenIdentifier, Data: "a"},
1347 {Type: TokenPunctuator, Data: "@"},
1348 {Type: TokenBraceWord, Data: "Q"},
1349 {Type: TokenPunctuator, Data: "}"},
1350 {Type: TokenWhitespace, Data: " "},
1351 {Type: TokenPunctuator, Data: "${"},
1352 {Type: TokenIdentifier, Data: "a"},
1353 {Type: TokenPunctuator, Data: "@"},
1354 {Type: TokenBraceWord, Data: "a"},
1355 {Type: TokenPunctuator, Data: "}"},
1356 {Type: TokenWhitespace, Data: " "},
1357 {Type: TokenPunctuator, Data: "${"},
1358 {Type: TokenIdentifier, Data: "a"},
1359 {Type: TokenPunctuator, Data: "@"},
1360 {Type: TokenBraceWord, Data: "P"},
1361 {Type: TokenPunctuator, Data: "}"},
1362 {Type: parser.TokenDone, Data: ""},
1363 },
1364 },
1365 { // 84
1366 "${a[@@]}",
1367 []parser.Token{
1368 {Type: TokenPunctuator, Data: "${"},
1369 {Type: TokenIdentifier, Data: "a"},
1370 {Type: TokenPunctuator, Data: "["},
1371 {Type: TokenWord, Data: "@"},
1372 {Type: parser.TokenError, Data: "invalid character"},
1373 },
1374 },
1375 { // 85
1376 "${a/[/c}",
1377 []parser.Token{
1378 {Type: TokenPunctuator, Data: "${"},
1379 {Type: TokenIdentifier, Data: "a"},
1380 {Type: TokenPunctuator, Data: "/"},
1381 {Type: parser.TokenError, Data: "unexpected EOF"},
1382 },
1383 },
1384 { // 86
1385 "${a/\\[/c}",
1386 []parser.Token{
1387 {Type: TokenPunctuator, Data: "${"},
1388 {Type: TokenIdentifier, Data: "a"},
1389 {Type: TokenPunctuator, Data: "/"},
1390 {Type: TokenPattern, Data: "\\["},
1391 {Type: TokenPunctuator, Data: "/"},
1392 {Type: TokenWord, Data: "c"},
1393 {Type: TokenPunctuator, Data: "}"},
1394 {Type: parser.TokenDone, Data: ""},
1395 },
1396 },
1397 { // 87
1398 "${a/[b]/c}",
1399 []parser.Token{
1400 {Type: TokenPunctuator, Data: "${"},
1401 {Type: TokenIdentifier, Data: "a"},
1402 {Type: TokenPunctuator, Data: "/"},
1403 {Type: TokenPattern, Data: "[b]"},
1404 {Type: TokenPunctuator, Data: "/"},
1405 {Type: TokenWord, Data: "c"},
1406 {Type: TokenPunctuator, Data: "}"},
1407 {Type: parser.TokenDone, Data: ""},
1408 },
1409 },
1410 { // 88
1411 "${a/(/c}",
1412 []parser.Token{
1413 {Type: TokenPunctuator, Data: "${"},
1414 {Type: TokenIdentifier, Data: "a"},
1415 {Type: TokenPunctuator, Data: "/"},
1416 {Type: parser.TokenError, Data: "invalid character"},
1417 },
1418 },
1419 { // 89
1420 "${a/\\(/c}",
1421 []parser.Token{
1422 {Type: TokenPunctuator, Data: "${"},
1423 {Type: TokenIdentifier, Data: "a"},
1424 {Type: TokenPunctuator, Data: "/"},
1425 {Type: TokenPattern, Data: "\\("},
1426 {Type: TokenPunctuator, Data: "/"},
1427 {Type: TokenWord, Data: "c"},
1428 {Type: TokenPunctuator, Data: "}"},
1429 {Type: parser.TokenDone, Data: ""},
1430 },
1431 },
1432 { // 90
1433 "${a/(b)/c}",
1434 []parser.Token{
1435 {Type: TokenPunctuator, Data: "${"},
1436 {Type: TokenIdentifier, Data: "a"},
1437 {Type: TokenPunctuator, Data: "/"},
1438 {Type: TokenPattern, Data: "(b)"},
1439 {Type: TokenPunctuator, Data: "/"},
1440 {Type: TokenWord, Data: "c"},
1441 {Type: TokenPunctuator, Data: "}"},
1442 {Type: parser.TokenDone, Data: ""},
1443 },
1444 },
1445 { // 91
1446 "${a@Z}",
1447 []parser.Token{
1448 {Type: TokenPunctuator, Data: "${"},
1449 {Type: TokenIdentifier, Data: "a"},
1450 {Type: TokenPunctuator, Data: "@"},
1451 {Type: parser.TokenError, Data: "invalid parameter expansion"},
1452 },
1453 },
1454 { // 92
1455 "${@} ${*}",
1456 []parser.Token{
1457 {Type: TokenPunctuator, Data: "${"},
1458 {Type: TokenKeyword, Data: "@"},
1459 {Type: TokenPunctuator, Data: "}"},
1460 {Type: TokenWhitespace, Data: " "},
1461 {Type: TokenPunctuator, Data: "${"},
1462 {Type: TokenKeyword, Data: "*"},
1463 {Type: TokenPunctuator, Data: "}"},
1464 {Type: parser.TokenDone, Data: ""},
1465 },
1466 },
1467 { // 93
1468 "$() $(()) `` ${}",
1469 []parser.Token{
1470 {Type: TokenPunctuator, Data: "$("},
1471 {Type: TokenPunctuator, Data: ")"},
1472 {Type: TokenWhitespace, Data: " "},
1473 {Type: TokenPunctuator, Data: "$(("},
1474 {Type: TokenPunctuator, Data: "))"},
1475 {Type: TokenWhitespace, Data: " "},
1476 {Type: TokenOpenBacktick, Data: "`"},
1477 {Type: TokenCloseBacktick, Data: "`"},
1478 {Type: TokenWhitespace, Data: " "},
1479 {Type: TokenPunctuator, Data: "${"},
1480 {Type: parser.TokenError, Data: "invalid parameter expansion"},
1481 },
1482 },
1483 { // 94
1484 "case a in b)c;;esac",
1485 []parser.Token{
1486 {Type: TokenKeyword, Data: "case"},
1487 {Type: TokenWhitespace, Data: " "},
1488 {Type: TokenWord, Data: "a"},
1489 {Type: TokenWhitespace, Data: " "},
1490 {Type: TokenKeyword, Data: "in"},
1491 {Type: TokenWhitespace, Data: " "},
1492 {Type: TokenWord, Data: "b"},
1493 {Type: TokenPunctuator, Data: ")"},
1494 {Type: TokenWord, Data: "c"},
1495 {Type: TokenPunctuator, Data: ";;"},
1496 {Type: TokenKeyword, Data: "esac"},
1497 {Type: parser.TokenDone, Data: ""},
1498 },
1499 },
1500 { // 95
1501 "case a in b;;esac",
1502 []parser.Token{
1503 {Type: TokenKeyword, Data: "case"},
1504 {Type: TokenWhitespace, Data: " "},
1505 {Type: TokenWord, Data: "a"},
1506 {Type: TokenWhitespace, Data: " "},
1507 {Type: TokenKeyword, Data: "in"},
1508 {Type: TokenWhitespace, Data: " "},
1509 {Type: TokenWord, Data: "b"},
1510 {Type: parser.TokenError, Data: "invalid character"},
1511 },
1512 },
1513 { // 96
1514 "case a in esac",
1515 []parser.Token{
1516 {Type: TokenKeyword, Data: "case"},
1517 {Type: TokenWhitespace, Data: " "},
1518 {Type: TokenWord, Data: "a"},
1519 {Type: TokenWhitespace, Data: " "},
1520 {Type: TokenKeyword, Data: "in"},
1521 {Type: TokenWhitespace, Data: " "},
1522 {Type: TokenKeyword, Data: "esac"},
1523 {Type: parser.TokenDone, Data: ""},
1524 },
1525 },
1526 { // 97
1527 "case a in #comment\nesac",
1528 []parser.Token{
1529 {Type: TokenKeyword, Data: "case"},
1530 {Type: TokenWhitespace, Data: " "},
1531 {Type: TokenWord, Data: "a"},
1532 {Type: TokenWhitespace, Data: " "},
1533 {Type: TokenKeyword, Data: "in"},
1534 {Type: TokenWhitespace, Data: " "},
1535 {Type: TokenComment, Data: "#comment"},
1536 {Type: TokenLineTerminator, Data: "\n"},
1537 {Type: TokenKeyword, Data: "esac"},
1538 {Type: parser.TokenDone, Data: ""},
1539 },
1540 },
1541 { // 98
1542 "case a in b)c;;d)e;&f)g;;&h)i\nesac",
1543 []parser.Token{
1544 {Type: TokenKeyword, Data: "case"},
1545 {Type: TokenWhitespace, Data: " "},
1546 {Type: TokenWord, Data: "a"},
1547 {Type: TokenWhitespace, Data: " "},
1548 {Type: TokenKeyword, Data: "in"},
1549 {Type: TokenWhitespace, Data: " "},
1550 {Type: TokenWord, Data: "b"},
1551 {Type: TokenPunctuator, Data: ")"},
1552 {Type: TokenWord, Data: "c"},
1553 {Type: TokenPunctuator, Data: ";;"},
1554 {Type: TokenWord, Data: "d"},
1555 {Type: TokenPunctuator, Data: ")"},
1556 {Type: TokenWord, Data: "e"},
1557 {Type: TokenPunctuator, Data: ";&"},
1558 {Type: TokenWord, Data: "f"},
1559 {Type: TokenPunctuator, Data: ")"},
1560 {Type: TokenWord, Data: "g"},
1561 {Type: TokenPunctuator, Data: ";;&"},
1562 {Type: TokenWord, Data: "h"},
1563 {Type: TokenPunctuator, Data: ")"},
1564 {Type: TokenWord, Data: "i"},
1565 {Type: TokenLineTerminator, Data: "\n"},
1566 {Type: TokenKeyword, Data: "esac"},
1567 {Type: parser.TokenDone, Data: ""},
1568 },
1569 },
1570 { // 99
1571 "case a in b) #comment\nc;; #comment\nd)e;&f)g\n#comment\nesac",
1572 []parser.Token{
1573 {Type: TokenKeyword, Data: "case"},
1574 {Type: TokenWhitespace, Data: " "},
1575 {Type: TokenWord, Data: "a"},
1576 {Type: TokenWhitespace, Data: " "},
1577 {Type: TokenKeyword, Data: "in"},
1578 {Type: TokenWhitespace, Data: " "},
1579 {Type: TokenWord, Data: "b"},
1580 {Type: TokenPunctuator, Data: ")"},
1581 {Type: TokenWhitespace, Data: " "},
1582 {Type: TokenComment, Data: "#comment"},
1583 {Type: TokenLineTerminator, Data: "\n"},
1584 {Type: TokenWord, Data: "c"},
1585 {Type: TokenPunctuator, Data: ";;"},
1586 {Type: TokenWhitespace, Data: " "},
1587 {Type: TokenComment, Data: "#comment"},
1588 {Type: TokenLineTerminator, Data: "\n"},
1589 {Type: TokenWord, Data: "d"},
1590 {Type: TokenPunctuator, Data: ")"},
1591 {Type: TokenWord, Data: "e"},
1592 {Type: TokenPunctuator, Data: ";&"},
1593 {Type: TokenWord, Data: "f"},
1594 {Type: TokenPunctuator, Data: ")"},
1595 {Type: TokenWord, Data: "g"},
1596 {Type: TokenLineTerminator, Data: "\n"},
1597 {Type: TokenComment, Data: "#comment"},
1598 {Type: TokenLineTerminator, Data: "\n"},
1599 {Type: TokenKeyword, Data: "esac"},
1600 {Type: parser.TokenDone, Data: ""},
1601 },
1602 },
1603 { // 100
1604 "case a b)c;;esac",
1605 []parser.Token{
1606 {Type: TokenKeyword, Data: "case"},
1607 {Type: TokenWhitespace, Data: " "},
1608 {Type: TokenWord, Data: "a"},
1609 {Type: TokenWhitespace, Data: " "},
1610 {Type: parser.TokenError, Data: "missing in"},
1611 },
1612 },
1613 { // 101
1614 "case a in b)c;;",
1615 []parser.Token{
1616 {Type: TokenKeyword, Data: "case"},
1617 {Type: TokenWhitespace, Data: " "},
1618 {Type: TokenWord, Data: "a"},
1619 {Type: TokenWhitespace, Data: " "},
1620 {Type: TokenKeyword, Data: "in"},
1621 {Type: TokenWhitespace, Data: " "},
1622 {Type: TokenWord, Data: "b"},
1623 {Type: TokenPunctuator, Data: ")"},
1624 {Type: TokenWord, Data: "c"},
1625 {Type: TokenPunctuator, Data: ";;"},
1626 {Type: parser.TokenError, Data: "unexpected EOF"},
1627 },
1628 },
1629 { // 102
1630 "if a; then b; fi",
1631 []parser.Token{
1632 {Type: TokenKeyword, Data: "if"},
1633 {Type: TokenWhitespace, Data: " "},
1634 {Type: TokenWord, Data: "a"},
1635 {Type: TokenPunctuator, Data: ";"},
1636 {Type: TokenWhitespace, Data: " "},
1637 {Type: TokenKeyword, Data: "then"},
1638 {Type: TokenWhitespace, Data: " "},
1639 {Type: TokenWord, Data: "b"},
1640 {Type: TokenPunctuator, Data: ";"},
1641 {Type: TokenWhitespace, Data: " "},
1642 {Type: TokenKeyword, Data: "fi"},
1643 {Type: parser.TokenDone, Data: ""},
1644 },
1645 },
1646 { // 103
1647 "if a;\nthen\nb\nfi",
1648 []parser.Token{
1649 {Type: TokenKeyword, Data: "if"},
1650 {Type: TokenWhitespace, Data: " "},
1651 {Type: TokenWord, Data: "a"},
1652 {Type: TokenPunctuator, Data: ";"},
1653 {Type: TokenLineTerminator, Data: "\n"},
1654 {Type: TokenKeyword, Data: "then"},
1655 {Type: TokenLineTerminator, Data: "\n"},
1656 {Type: TokenWord, Data: "b"},
1657 {Type: TokenLineTerminator, Data: "\n"},
1658 {Type: TokenKeyword, Data: "fi"},
1659 {Type: parser.TokenDone, Data: ""},
1660 },
1661 },
1662 { // 104
1663 "if a && b || c & then d; fi",
1664 []parser.Token{
1665 {Type: TokenKeyword, Data: "if"},
1666 {Type: TokenWhitespace, Data: " "},
1667 {Type: TokenWord, Data: "a"},
1668 {Type: TokenWhitespace, Data: " "},
1669 {Type: TokenPunctuator, Data: "&&"},
1670 {Type: TokenWhitespace, Data: " "},
1671 {Type: TokenWord, Data: "b"},
1672 {Type: TokenWhitespace, Data: " "},
1673 {Type: TokenPunctuator, Data: "||"},
1674 {Type: TokenWhitespace, Data: " "},
1675 {Type: TokenWord, Data: "c"},
1676 {Type: TokenWhitespace, Data: " "},
1677 {Type: TokenPunctuator, Data: "&"},
1678 {Type: TokenWhitespace, Data: " "},
1679 {Type: TokenKeyword, Data: "then"},
1680 {Type: TokenWhitespace, Data: " "},
1681 {Type: TokenWord, Data: "d"},
1682 {Type: TokenPunctuator, Data: ";"},
1683 {Type: TokenWhitespace, Data: " "},
1684 {Type: TokenKeyword, Data: "fi"},
1685 {Type: parser.TokenDone, Data: ""},
1686 },
1687 },
1688 { // 105
1689 "if a; then b; else c; fi",
1690 []parser.Token{
1691 {Type: TokenKeyword, Data: "if"},
1692 {Type: TokenWhitespace, Data: " "},
1693 {Type: TokenWord, Data: "a"},
1694 {Type: TokenPunctuator, Data: ";"},
1695 {Type: TokenWhitespace, Data: " "},
1696 {Type: TokenKeyword, Data: "then"},
1697 {Type: TokenWhitespace, Data: " "},
1698 {Type: TokenWord, Data: "b"},
1699 {Type: TokenPunctuator, Data: ";"},
1700 {Type: TokenWhitespace, Data: " "},
1701 {Type: TokenKeyword, Data: "else"},
1702 {Type: TokenWhitespace, Data: " "},
1703 {Type: TokenWord, Data: "c"},
1704 {Type: TokenPunctuator, Data: ";"},
1705 {Type: TokenWhitespace, Data: " "},
1706 {Type: TokenKeyword, Data: "fi"},
1707 {Type: parser.TokenDone, Data: ""},
1708 },
1709 },
1710 { // 106
1711 "if a #comment\n then b; else #comment\nc; fi",
1712 []parser.Token{
1713 {Type: TokenKeyword, Data: "if"},
1714 {Type: TokenWhitespace, Data: " "},
1715 {Type: TokenWord, Data: "a"},
1716 {Type: TokenWhitespace, Data: " "},
1717 {Type: TokenComment, Data: "#comment"},
1718 {Type: TokenLineTerminator, Data: "\n"},
1719 {Type: TokenWhitespace, Data: " "},
1720 {Type: TokenKeyword, Data: "then"},
1721 {Type: TokenWhitespace, Data: " "},
1722 {Type: TokenWord, Data: "b"},
1723 {Type: TokenPunctuator, Data: ";"},
1724 {Type: TokenWhitespace, Data: " "},
1725 {Type: TokenKeyword, Data: "else"},
1726 {Type: TokenWhitespace, Data: " "},
1727 {Type: TokenComment, Data: "#comment"},
1728 {Type: TokenLineTerminator, Data: "\n"},
1729 {Type: TokenWord, Data: "c"},
1730 {Type: TokenPunctuator, Data: ";"},
1731 {Type: TokenWhitespace, Data: " "},
1732 {Type: TokenKeyword, Data: "fi"},
1733 {Type: parser.TokenDone, Data: ""},
1734 },
1735 },
1736 { // 107
1737 "if a\n#comment\n then b; else\n#comment\nc; fi",
1738 []parser.Token{
1739 {Type: TokenKeyword, Data: "if"},
1740 {Type: TokenWhitespace, Data: " "},
1741 {Type: TokenWord, Data: "a"},
1742 {Type: TokenLineTerminator, Data: "\n"},
1743 {Type: TokenComment, Data: "#comment"},
1744 {Type: TokenLineTerminator, Data: "\n"},
1745 {Type: TokenWhitespace, Data: " "},
1746 {Type: TokenKeyword, Data: "then"},
1747 {Type: TokenWhitespace, Data: " "},
1748 {Type: TokenWord, Data: "b"},
1749 {Type: TokenPunctuator, Data: ";"},
1750 {Type: TokenWhitespace, Data: " "},
1751 {Type: TokenKeyword, Data: "else"},
1752 {Type: TokenLineTerminator, Data: "\n"},
1753 {Type: TokenComment, Data: "#comment"},
1754 {Type: TokenLineTerminator, Data: "\n"},
1755 {Type: TokenWord, Data: "c"},
1756 {Type: TokenPunctuator, Data: ";"},
1757 {Type: TokenWhitespace, Data: " "},
1758 {Type: TokenKeyword, Data: "fi"},
1759 {Type: parser.TokenDone, Data: ""},
1760 },
1761 },
1762 { // 108
1763 "if a; then b; elif c; then d; else if e; then f; fi; fi",
1764 []parser.Token{
1765 {Type: TokenKeyword, Data: "if"},
1766 {Type: TokenWhitespace, Data: " "},
1767 {Type: TokenWord, Data: "a"},
1768 {Type: TokenPunctuator, Data: ";"},
1769 {Type: TokenWhitespace, Data: " "},
1770 {Type: TokenKeyword, Data: "then"},
1771 {Type: TokenWhitespace, Data: " "},
1772 {Type: TokenWord, Data: "b"},
1773 {Type: TokenPunctuator, Data: ";"},
1774 {Type: TokenWhitespace, Data: " "},
1775 {Type: TokenKeyword, Data: "elif"},
1776 {Type: TokenWhitespace, Data: " "},
1777 {Type: TokenWord, Data: "c"},
1778 {Type: TokenPunctuator, Data: ";"},
1779 {Type: TokenWhitespace, Data: " "},
1780 {Type: TokenKeyword, Data: "then"},
1781 {Type: TokenWhitespace, Data: " "},
1782 {Type: TokenWord, Data: "d"},
1783 {Type: TokenPunctuator, Data: ";"},
1784 {Type: TokenWhitespace, Data: " "},
1785 {Type: TokenKeyword, Data: "else"},
1786 {Type: TokenWhitespace, Data: " "},
1787 {Type: TokenKeyword, Data: "if"},
1788 {Type: TokenWhitespace, Data: " "},
1789 {Type: TokenWord, Data: "e"},
1790 {Type: TokenPunctuator, Data: ";"},
1791 {Type: TokenWhitespace, Data: " "},
1792 {Type: TokenKeyword, Data: "then"},
1793 {Type: TokenWhitespace, Data: " "},
1794 {Type: TokenWord, Data: "f"},
1795 {Type: TokenPunctuator, Data: ";"},
1796 {Type: TokenWhitespace, Data: " "},
1797 {Type: TokenKeyword, Data: "fi"},
1798 {Type: TokenPunctuator, Data: ";"},
1799 {Type: TokenWhitespace, Data: " "},
1800 {Type: TokenKeyword, Data: "fi"},
1801 {Type: parser.TokenDone, Data: ""},
1802 },
1803 },
1804 { // 109
1805 "while a; do b; done",
1806 []parser.Token{
1807 {Type: TokenKeyword, Data: "while"},
1808 {Type: TokenWhitespace, Data: " "},
1809 {Type: TokenWord, Data: "a"},
1810 {Type: TokenPunctuator, Data: ";"},
1811 {Type: TokenWhitespace, Data: " "},
1812 {Type: TokenKeyword, Data: "do"},
1813 {Type: TokenWhitespace, Data: " "},
1814 {Type: TokenWord, Data: "b"},
1815 {Type: TokenPunctuator, Data: ";"},
1816 {Type: TokenWhitespace, Data: " "},
1817 {Type: TokenKeyword, Data: "done"},
1818 {Type: parser.TokenDone, Data: ""},
1819 },
1820 },
1821 { // 110
1822 "while a; #comment\ndo b; done",
1823 []parser.Token{
1824 {Type: TokenKeyword, Data: "while"},
1825 {Type: TokenWhitespace, Data: " "},
1826 {Type: TokenWord, Data: "a"},
1827 {Type: TokenPunctuator, Data: ";"},
1828 {Type: TokenWhitespace, Data: " "},
1829 {Type: TokenComment, Data: "#comment"},
1830 {Type: TokenLineTerminator, Data: "\n"},
1831 {Type: TokenKeyword, Data: "do"},
1832 {Type: TokenWhitespace, Data: " "},
1833 {Type: TokenWord, Data: "b"},
1834 {Type: TokenPunctuator, Data: ";"},
1835 {Type: TokenWhitespace, Data: " "},
1836 {Type: TokenKeyword, Data: "done"},
1837 {Type: parser.TokenDone, Data: ""},
1838 },
1839 },
1840 { // 111
1841 "until a && b || c & do b; done",
1842 []parser.Token{
1843 {Type: TokenKeyword, Data: "until"},
1844 {Type: TokenWhitespace, Data: " "},
1845 {Type: TokenWord, Data: "a"},
1846 {Type: TokenWhitespace, Data: " "},
1847 {Type: TokenPunctuator, Data: "&&"},
1848 {Type: TokenWhitespace, Data: " "},
1849 {Type: TokenWord, Data: "b"},
1850 {Type: TokenWhitespace, Data: " "},
1851 {Type: TokenPunctuator, Data: "||"},
1852 {Type: TokenWhitespace, Data: " "},
1853 {Type: TokenWord, Data: "c"},
1854 {Type: TokenWhitespace, Data: " "},
1855 {Type: TokenPunctuator, Data: "&"},
1856 {Type: TokenWhitespace, Data: " "},
1857 {Type: TokenKeyword, Data: "do"},
1858 {Type: TokenWhitespace, Data: " "},
1859 {Type: TokenWord, Data: "b"},
1860 {Type: TokenPunctuator, Data: ";"},
1861 {Type: TokenWhitespace, Data: " "},
1862 {Type: TokenKeyword, Data: "done"},
1863 {Type: parser.TokenDone, Data: ""},
1864 },
1865 },
1866 { // 112
1867 "while a; do break; done",
1868 []parser.Token{
1869 {Type: TokenKeyword, Data: "while"},
1870 {Type: TokenWhitespace, Data: " "},
1871 {Type: TokenWord, Data: "a"},
1872 {Type: TokenPunctuator, Data: ";"},
1873 {Type: TokenWhitespace, Data: " "},
1874 {Type: TokenKeyword, Data: "do"},
1875 {Type: TokenWhitespace, Data: " "},
1876 {Type: TokenKeyword, Data: "break"},
1877 {Type: TokenPunctuator, Data: ";"},
1878 {Type: TokenWhitespace, Data: " "},
1879 {Type: TokenKeyword, Data: "done"},
1880 {Type: parser.TokenDone, Data: ""},
1881 },
1882 },
1883 { // 113
1884 "until a; do continue; done",
1885 []parser.Token{
1886 {Type: TokenKeyword, Data: "until"},
1887 {Type: TokenWhitespace, Data: " "},
1888 {Type: TokenWord, Data: "a"},
1889 {Type: TokenPunctuator, Data: ";"},
1890 {Type: TokenWhitespace, Data: " "},
1891 {Type: TokenKeyword, Data: "do"},
1892 {Type: TokenWhitespace, Data: " "},
1893 {Type: TokenKeyword, Data: "continue"},
1894 {Type: TokenPunctuator, Data: ";"},
1895 {Type: TokenWhitespace, Data: " "},
1896 {Type: TokenKeyword, Data: "done"},
1897 {Type: parser.TokenDone, Data: ""},
1898 },
1899 },
1900 { // 114
1901 "break",
1902 []parser.Token{
1903 {Type: parser.TokenError, Data: "invalid keyword"},
1904 },
1905 },
1906 { // 115
1907 "continue",
1908 []parser.Token{
1909 {Type: parser.TokenError, Data: "invalid keyword"},
1910 },
1911 },
1912 { // 116
1913 "for a; do b; done",
1914 []parser.Token{
1915 {Type: TokenKeyword, Data: "for"},
1916 {Type: TokenWhitespace, Data: " "},
1917 {Type: TokenIdentifier, Data: "a"},
1918 {Type: TokenPunctuator, Data: ";"},
1919 {Type: TokenWhitespace, Data: " "},
1920 {Type: TokenKeyword, Data: "do"},
1921 {Type: TokenWhitespace, Data: " "},
1922 {Type: TokenWord, Data: "b"},
1923 {Type: TokenPunctuator, Data: ";"},
1924 {Type: TokenWhitespace, Data: " "},
1925 {Type: TokenKeyword, Data: "done"},
1926 {Type: parser.TokenDone, Data: ""},
1927 },
1928 },
1929 { // 117
1930 "for a #comment\ndo b; done",
1931 []parser.Token{
1932 {Type: TokenKeyword, Data: "for"},
1933 {Type: TokenWhitespace, Data: " "},
1934 {Type: TokenIdentifier, Data: "a"},
1935 {Type: TokenWhitespace, Data: " "},
1936 {Type: TokenComment, Data: "#comment"},
1937 {Type: TokenLineTerminator, Data: "\n"},
1938 {Type: TokenKeyword, Data: "do"},
1939 {Type: TokenWhitespace, Data: " "},
1940 {Type: TokenWord, Data: "b"},
1941 {Type: TokenPunctuator, Data: ";"},
1942 {Type: TokenWhitespace, Data: " "},
1943 {Type: TokenKeyword, Data: "done"},
1944 {Type: parser.TokenDone, Data: ""},
1945 },
1946 },
1947 { // 118
1948 "for a do b; done",
1949 []parser.Token{
1950 {Type: TokenKeyword, Data: "for"},
1951 {Type: TokenWhitespace, Data: " "},
1952 {Type: TokenIdentifier, Data: "a"},
1953 {Type: TokenWhitespace, Data: " "},
1954 {Type: TokenKeyword, Data: "do"},
1955 {Type: TokenWhitespace, Data: " "},
1956 {Type: TokenWord, Data: "b"},
1957 {Type: TokenPunctuator, Data: ";"},
1958 {Type: TokenWhitespace, Data: " "},
1959 {Type: TokenKeyword, Data: "done"},
1960 {Type: parser.TokenDone, Data: ""},
1961 },
1962 },
1963 { // 119
1964 "for a\ndo b; done",
1965 []parser.Token{
1966 {Type: TokenKeyword, Data: "for"},
1967 {Type: TokenWhitespace, Data: " "},
1968 {Type: TokenIdentifier, Data: "a"},
1969 {Type: TokenLineTerminator, Data: "\n"},
1970 {Type: TokenKeyword, Data: "do"},
1971 {Type: TokenWhitespace, Data: " "},
1972 {Type: TokenWord, Data: "b"},
1973 {Type: TokenPunctuator, Data: ";"},
1974 {Type: TokenWhitespace, Data: " "},
1975 {Type: TokenKeyword, Data: "done"},
1976 {Type: parser.TokenDone, Data: ""},
1977 },
1978 },
1979 { // 120
1980 "for a in 1 2 3; do b; done",
1981 []parser.Token{
1982 {Type: TokenKeyword, Data: "for"},
1983 {Type: TokenWhitespace, Data: " "},
1984 {Type: TokenIdentifier, Data: "a"},
1985 {Type: TokenWhitespace, Data: " "},
1986 {Type: TokenKeyword, Data: "in"},
1987 {Type: TokenWhitespace, Data: " "},
1988 {Type: TokenWord, Data: "1"},
1989 {Type: TokenWhitespace, Data: " "},
1990 {Type: TokenWord, Data: "2"},
1991 {Type: TokenWhitespace, Data: " "},
1992 {Type: TokenWord, Data: "3"},
1993 {Type: TokenPunctuator, Data: ";"},
1994 {Type: TokenWhitespace, Data: " "},
1995 {Type: TokenKeyword, Data: "do"},
1996 {Type: TokenWhitespace, Data: " "},
1997 {Type: TokenWord, Data: "b"},
1998 {Type: TokenPunctuator, Data: ";"},
1999 {Type: TokenWhitespace, Data: " "},
2000 {Type: TokenKeyword, Data: "done"},
2001 {Type: parser.TokenDone, Data: ""},
2002 },
2003 },
2004 { // 121
2005 "for a in 1 2 3 #comment\ndo b; done",
2006 []parser.Token{
2007 {Type: TokenKeyword, Data: "for"},
2008 {Type: TokenWhitespace, Data: " "},
2009 {Type: TokenIdentifier, Data: "a"},
2010 {Type: TokenWhitespace, Data: " "},
2011 {Type: TokenKeyword, Data: "in"},
2012 {Type: TokenWhitespace, Data: " "},
2013 {Type: TokenWord, Data: "1"},
2014 {Type: TokenWhitespace, Data: " "},
2015 {Type: TokenWord, Data: "2"},
2016 {Type: TokenWhitespace, Data: " "},
2017 {Type: TokenWord, Data: "3"},
2018 {Type: TokenWhitespace, Data: " "},
2019 {Type: TokenComment, Data: "#comment"},
2020 {Type: TokenLineTerminator, Data: "\n"},
2021 {Type: TokenKeyword, Data: "do"},
2022 {Type: TokenWhitespace, Data: " "},
2023 {Type: TokenWord, Data: "b"},
2024 {Type: TokenPunctuator, Data: ";"},
2025 {Type: TokenWhitespace, Data: " "},
2026 {Type: TokenKeyword, Data: "done"},
2027 {Type: parser.TokenDone, Data: ""},
2028 },
2029 },
2030 { // 122
2031 "for a #comment\nin 1 2 3\ndo b; done",
2032 []parser.Token{
2033 {Type: TokenKeyword, Data: "for"},
2034 {Type: TokenWhitespace, Data: " "},
2035 {Type: TokenIdentifier, Data: "a"},
2036 {Type: TokenWhitespace, Data: " "},
2037 {Type: TokenComment, Data: "#comment"},
2038 {Type: TokenLineTerminator, Data: "\n"},
2039 {Type: TokenKeyword, Data: "in"},
2040 {Type: TokenWhitespace, Data: " "},
2041 {Type: TokenWord, Data: "1"},
2042 {Type: TokenWhitespace, Data: " "},
2043 {Type: TokenWord, Data: "2"},
2044 {Type: TokenWhitespace, Data: " "},
2045 {Type: TokenWord, Data: "3"},
2046 {Type: TokenLineTerminator, Data: "\n"},
2047 {Type: TokenKeyword, Data: "do"},
2048 {Type: TokenWhitespace, Data: " "},
2049 {Type: TokenWord, Data: "b"},
2050 {Type: TokenPunctuator, Data: ";"},
2051 {Type: TokenWhitespace, Data: " "},
2052 {Type: TokenKeyword, Data: "done"},
2053 {Type: parser.TokenDone, Data: ""},
2054 },
2055 },
2056 { // 123
2057 "for % in 1 2 3; do b; done",
2058 []parser.Token{
2059 {Type: TokenKeyword, Data: "for"},
2060 {Type: TokenWhitespace, Data: " "},
2061 {Type: parser.TokenError, Data: "invalid identifier"},
2062 },
2063 },
2064 { // 124
2065 "for a in 1 2 3 do b; done",
2066 []parser.Token{
2067 {Type: TokenKeyword, Data: "for"},
2068 {Type: TokenWhitespace, Data: " "},
2069 {Type: TokenIdentifier, Data: "a"},
2070 {Type: TokenWhitespace, Data: " "},
2071 {Type: TokenKeyword, Data: "in"},
2072 {Type: TokenWhitespace, Data: " "},
2073 {Type: TokenWord, Data: "1"},
2074 {Type: TokenWhitespace, Data: " "},
2075 {Type: TokenWord, Data: "2"},
2076 {Type: TokenWhitespace, Data: " "},
2077 {Type: TokenWord, Data: "3"},
2078 {Type: TokenWhitespace, Data: " "},
2079 {Type: TokenWord, Data: "do"},
2080 {Type: TokenWhitespace, Data: " "},
2081 {Type: TokenWord, Data: "b"},
2082 {Type: TokenPunctuator, Data: ";"},
2083 {Type: TokenWhitespace, Data: " "},
2084 {Type: parser.TokenError, Data: "missing do"},
2085 },
2086 },
2087 { // 125
2088 "for ((a=1;a<2;a++)) do b; done",
2089 []parser.Token{
2090 {Type: TokenKeyword, Data: "for"},
2091 {Type: TokenWhitespace, Data: " "},
2092 {Type: TokenPunctuator, Data: "(("},
2093 {Type: TokenWord, Data: "a"},
2094 {Type: TokenPunctuator, Data: "="},
2095 {Type: TokenNumberLiteral, Data: "1"},
2096 {Type: TokenPunctuator, Data: ";"},
2097 {Type: TokenWord, Data: "a"},
2098 {Type: TokenPunctuator, Data: "<"},
2099 {Type: TokenNumberLiteral, Data: "2"},
2100 {Type: TokenPunctuator, Data: ";"},
2101 {Type: TokenWord, Data: "a"},
2102 {Type: TokenPunctuator, Data: "++"},
2103 {Type: TokenPunctuator, Data: "))"},
2104 {Type: TokenWhitespace, Data: " "},
2105 {Type: TokenKeyword, Data: "do"},
2106 {Type: TokenWhitespace, Data: " "},
2107 {Type: TokenWord, Data: "b"},
2108 {Type: TokenPunctuator, Data: ";"},
2109 {Type: TokenWhitespace, Data: " "},
2110 {Type: TokenKeyword, Data: "done"},
2111 {Type: parser.TokenDone, Data: ""},
2112 },
2113 },
2114 { // 126
2115 "for ((a=1;a<2;a++)); do b; done",
2116 []parser.Token{
2117 {Type: TokenKeyword, Data: "for"},
2118 {Type: TokenWhitespace, Data: " "},
2119 {Type: TokenPunctuator, Data: "(("},
2120 {Type: TokenWord, Data: "a"},
2121 {Type: TokenPunctuator, Data: "="},
2122 {Type: TokenNumberLiteral, Data: "1"},
2123 {Type: TokenPunctuator, Data: ";"},
2124 {Type: TokenWord, Data: "a"},
2125 {Type: TokenPunctuator, Data: "<"},
2126 {Type: TokenNumberLiteral, Data: "2"},
2127 {Type: TokenPunctuator, Data: ";"},
2128 {Type: TokenWord, Data: "a"},
2129 {Type: TokenPunctuator, Data: "++"},
2130 {Type: TokenPunctuator, Data: "))"},
2131 {Type: TokenPunctuator, Data: ";"},
2132 {Type: TokenWhitespace, Data: " "},
2133 {Type: TokenKeyword, Data: "do"},
2134 {Type: TokenWhitespace, Data: " "},
2135 {Type: TokenWord, Data: "b"},
2136 {Type: TokenPunctuator, Data: ";"},
2137 {Type: TokenWhitespace, Data: " "},
2138 {Type: TokenKeyword, Data: "done"},
2139 {Type: parser.TokenDone, Data: ""},
2140 },
2141 },
2142 { // 127
2143 "for ( a=1;a<2;a++ ); do b; done",
2144 []parser.Token{
2145 {Type: TokenKeyword, Data: "for"},
2146 {Type: TokenWhitespace, Data: " "},
2147 {Type: parser.TokenError, Data: "invalid character"},
2148 },
2149 },
2150 { // 128
2151 "select a; do b; done",
2152 []parser.Token{
2153 {Type: TokenKeyword, Data: "select"},
2154 {Type: TokenWhitespace, Data: " "},
2155 {Type: TokenIdentifier, Data: "a"},
2156 {Type: TokenPunctuator, Data: ";"},
2157 {Type: TokenWhitespace, Data: " "},
2158 {Type: TokenKeyword, Data: "do"},
2159 {Type: TokenWhitespace, Data: " "},
2160 {Type: TokenWord, Data: "b"},
2161 {Type: TokenPunctuator, Data: ";"},
2162 {Type: TokenWhitespace, Data: " "},
2163 {Type: TokenKeyword, Data: "done"},
2164 {Type: parser.TokenDone, Data: ""},
2165 },
2166 },
2167 { // 129
2168 "select a do b; done",
2169 []parser.Token{
2170 {Type: TokenKeyword, Data: "select"},
2171 {Type: TokenWhitespace, Data: " "},
2172 {Type: TokenIdentifier, Data: "a"},
2173 {Type: TokenWhitespace, Data: " "},
2174 {Type: TokenKeyword, Data: "do"},
2175 {Type: TokenWhitespace, Data: " "},
2176 {Type: TokenWord, Data: "b"},
2177 {Type: TokenPunctuator, Data: ";"},
2178 {Type: TokenWhitespace, Data: " "},
2179 {Type: TokenKeyword, Data: "done"},
2180 {Type: parser.TokenDone, Data: ""},
2181 },
2182 },
2183 { // 130
2184 "select a\ndo b; done",
2185 []parser.Token{
2186 {Type: TokenKeyword, Data: "select"},
2187 {Type: TokenWhitespace, Data: " "},
2188 {Type: TokenIdentifier, Data: "a"},
2189 {Type: TokenLineTerminator, Data: "\n"},
2190 {Type: TokenKeyword, Data: "do"},
2191 {Type: TokenWhitespace, Data: " "},
2192 {Type: TokenWord, Data: "b"},
2193 {Type: TokenPunctuator, Data: ";"},
2194 {Type: TokenWhitespace, Data: " "},
2195 {Type: TokenKeyword, Data: "done"},
2196 {Type: parser.TokenDone, Data: ""},
2197 },
2198 },
2199 { // 131
2200 "select a in 1 2 3; do b; done",
2201 []parser.Token{
2202 {Type: TokenKeyword, Data: "select"},
2203 {Type: TokenWhitespace, Data: " "},
2204 {Type: TokenIdentifier, Data: "a"},
2205 {Type: TokenWhitespace, Data: " "},
2206 {Type: TokenKeyword, Data: "in"},
2207 {Type: TokenWhitespace, Data: " "},
2208 {Type: TokenWord, Data: "1"},
2209 {Type: TokenWhitespace, Data: " "},
2210 {Type: TokenWord, Data: "2"},
2211 {Type: TokenWhitespace, Data: " "},
2212 {Type: TokenWord, Data: "3"},
2213 {Type: TokenPunctuator, Data: ";"},
2214 {Type: TokenWhitespace, Data: " "},
2215 {Type: TokenKeyword, Data: "do"},
2216 {Type: TokenWhitespace, Data: " "},
2217 {Type: TokenWord, Data: "b"},
2218 {Type: TokenPunctuator, Data: ";"},
2219 {Type: TokenWhitespace, Data: " "},
2220 {Type: TokenKeyword, Data: "done"},
2221 {Type: parser.TokenDone, Data: ""},
2222 },
2223 },
2224 { // 132
2225 "select a in 1 2 3 do b; done",
2226 []parser.Token{
2227 {Type: TokenKeyword, Data: "select"},
2228 {Type: TokenWhitespace, Data: " "},
2229 {Type: TokenIdentifier, Data: "a"},
2230 {Type: TokenWhitespace, Data: " "},
2231 {Type: TokenKeyword, Data: "in"},
2232 {Type: TokenWhitespace, Data: " "},
2233 {Type: TokenWord, Data: "1"},
2234 {Type: TokenWhitespace, Data: " "},
2235 {Type: TokenWord, Data: "2"},
2236 {Type: TokenWhitespace, Data: " "},
2237 {Type: TokenWord, Data: "3"},
2238 {Type: TokenWhitespace, Data: " "},
2239 {Type: TokenWord, Data: "do"},
2240 {Type: TokenWhitespace, Data: " "},
2241 {Type: TokenWord, Data: "b"},
2242 {Type: TokenPunctuator, Data: ";"},
2243 {Type: TokenWhitespace, Data: " "},
2244 {Type: parser.TokenError, Data: "missing do"},
2245 },
2246 },
2247 { // 133
2248
2249 "coproc a b",
2250 []parser.Token{
2251 {Type: TokenKeyword, Data: "coproc"},
2252 {Type: TokenWhitespace, Data: " "},
2253 {Type: TokenWord, Data: "a"},
2254 {Type: TokenWhitespace, Data: " "},
2255 {Type: TokenWord, Data: "b"},
2256 {Type: parser.TokenDone, Data: ""},
2257 },
2258 },
2259 { // 134
2260
2261 "coproc fora b",
2262 []parser.Token{
2263 {Type: TokenKeyword, Data: "coproc"},
2264 {Type: TokenWhitespace, Data: " "},
2265 {Type: TokenWord, Data: "fora"},
2266 {Type: TokenWhitespace, Data: " "},
2267 {Type: TokenWord, Data: "b"},
2268 {Type: parser.TokenDone, Data: ""},
2269 },
2270 },
2271 { // 135
2272 "coproc while a; do b; done",
2273 []parser.Token{
2274 {Type: TokenKeyword, Data: "coproc"},
2275 {Type: TokenWhitespace, Data: " "},
2276 {Type: TokenKeyword, Data: "while"},
2277 {Type: TokenWhitespace, Data: " "},
2278 {Type: TokenWord, Data: "a"},
2279 {Type: TokenPunctuator, Data: ";"},
2280 {Type: TokenWhitespace, Data: " "},
2281 {Type: TokenKeyword, Data: "do"},
2282 {Type: TokenWhitespace, Data: " "},
2283 {Type: TokenWord, Data: "b"},
2284 {Type: TokenPunctuator, Data: ";"},
2285 {Type: TokenWhitespace, Data: " "},
2286 {Type: TokenKeyword, Data: "done"},
2287 {Type: parser.TokenDone, Data: ""},
2288 },
2289 },
2290 { // 136
2291 "coproc a while b; do c; done",
2292 []parser.Token{
2293 {Type: TokenKeyword, Data: "coproc"},
2294 {Type: TokenWhitespace, Data: " "},
2295 {Type: TokenIdentifier, Data: "a"},
2296 {Type: TokenWhitespace, Data: " "},
2297 {Type: TokenKeyword, Data: "while"},
2298 {Type: TokenWhitespace, Data: " "},
2299 {Type: TokenWord, Data: "b"},
2300 {Type: TokenPunctuator, Data: ";"},
2301 {Type: TokenWhitespace, Data: " "},
2302 {Type: TokenKeyword, Data: "do"},
2303 {Type: TokenWhitespace, Data: " "},
2304 {Type: TokenWord, Data: "c"},
2305 {Type: TokenPunctuator, Data: ";"},
2306 {Type: TokenWhitespace, Data: " "},
2307 {Type: TokenKeyword, Data: "done"},
2308 {Type: parser.TokenDone, Data: ""},
2309 },
2310 },
2311 { // 137
2312 "echo }",
2313 []parser.Token{
2314 {Type: TokenWord, Data: "echo"},
2315 {Type: TokenWhitespace, Data: " "},
2316 {Type: TokenPunctuator, Data: "}"},
2317 {Type: parser.TokenDone, Data: ""},
2318 },
2319 },
2320 { // 138
2321 "{ echo }",
2322 []parser.Token{
2323 {Type: TokenPunctuator, Data: "{"},
2324 {Type: TokenWhitespace, Data: " "},
2325 {Type: TokenWord, Data: "echo"},
2326 {Type: TokenWhitespace, Data: " "},
2327 {Type: TokenPunctuator, Data: "}"},
2328 {Type: parser.TokenError, Data: "unexpected EOF"},
2329 },
2330 },
2331 { // 139
2332 "{ echo };}",
2333 []parser.Token{
2334 {Type: TokenPunctuator, Data: "{"},
2335 {Type: TokenWhitespace, Data: " "},
2336 {Type: TokenWord, Data: "echo"},
2337 {Type: TokenWhitespace, Data: " "},
2338 {Type: TokenPunctuator, Data: "}"},
2339 {Type: TokenPunctuator, Data: ";"},
2340 {Type: TokenPunctuator, Data: "}"},
2341 {Type: parser.TokenDone, Data: ""},
2342 },
2343 },
2344 { // 140
2345 "function a{ b; }",
2346 []parser.Token{
2347 {Type: TokenKeyword, Data: "function"},
2348 {Type: TokenWhitespace, Data: " "},
2349 {Type: TokenFunctionIdentifier, Data: "a"},
2350 {Type: TokenPunctuator, Data: "{"},
2351 {Type: TokenWhitespace, Data: " "},
2352 {Type: TokenWord, Data: "b"},
2353 {Type: TokenPunctuator, Data: ";"},
2354 {Type: TokenWhitespace, Data: " "},
2355 {Type: TokenPunctuator, Data: "}"},
2356 {Type: parser.TokenDone, Data: ""},
2357 },
2358 },
2359 { // 141
2360 "function a{ b; }\nfunction a\n{ b; }",
2361 []parser.Token{
2362 {Type: TokenKeyword, Data: "function"},
2363 {Type: TokenWhitespace, Data: " "},
2364 {Type: TokenFunctionIdentifier, Data: "a"},
2365 {Type: TokenPunctuator, Data: "{"},
2366 {Type: TokenWhitespace, Data: " "},
2367 {Type: TokenWord, Data: "b"},
2368 {Type: TokenPunctuator, Data: ";"},
2369 {Type: TokenWhitespace, Data: " "},
2370 {Type: TokenPunctuator, Data: "}"},
2371 {Type: TokenLineTerminator, Data: "\n"},
2372 {Type: TokenKeyword, Data: "function"},
2373 {Type: TokenWhitespace, Data: " "},
2374 {Type: TokenFunctionIdentifier, Data: "a"},
2375 {Type: TokenLineTerminator, Data: "\n"},
2376 {Type: TokenPunctuator, Data: "{"},
2377 {Type: TokenWhitespace, Data: " "},
2378 {Type: TokenWord, Data: "b"},
2379 {Type: TokenPunctuator, Data: ";"},
2380 {Type: TokenWhitespace, Data: " "},
2381 {Type: TokenPunctuator, Data: "}"},
2382 {Type: parser.TokenDone, Data: ""},
2383 },
2384 },
2385 { // 142
2386 "function a\n{ b; }",
2387 []parser.Token{
2388 {Type: TokenKeyword, Data: "function"},
2389 {Type: TokenWhitespace, Data: " "},
2390 {Type: TokenFunctionIdentifier, Data: "a"},
2391 {Type: TokenLineTerminator, Data: "\n"},
2392 {Type: TokenPunctuator, Data: "{"},
2393 {Type: TokenWhitespace, Data: " "},
2394 {Type: TokenWord, Data: "b"},
2395 {Type: TokenPunctuator, Data: ";"},
2396 {Type: TokenWhitespace, Data: " "},
2397 {Type: TokenPunctuator, Data: "}"},
2398 {Type: parser.TokenDone, Data: ""},
2399 },
2400 },
2401 { // 143
2402 "function a(){ b; }",
2403 []parser.Token{
2404 {Type: TokenKeyword, Data: "function"},
2405 {Type: TokenWhitespace, Data: " "},
2406 {Type: TokenFunctionIdentifier, Data: "a"},
2407 {Type: TokenPunctuator, Data: "("},
2408 {Type: TokenPunctuator, Data: ")"},
2409 {Type: TokenPunctuator, Data: "{"},
2410 {Type: TokenWhitespace, Data: " "},
2411 {Type: TokenWord, Data: "b"},
2412 {Type: TokenPunctuator, Data: ";"},
2413 {Type: TokenWhitespace, Data: " "},
2414 {Type: TokenPunctuator, Data: "}"},
2415 {Type: parser.TokenDone, Data: ""},
2416 },
2417 },
2418 { // 144
2419 "function a ( ) { b; }",
2420 []parser.Token{
2421 {Type: TokenKeyword, Data: "function"},
2422 {Type: TokenWhitespace, Data: " "},
2423 {Type: TokenFunctionIdentifier, Data: "a"},
2424 {Type: TokenWhitespace, Data: " "},
2425 {Type: TokenPunctuator, Data: "("},
2426 {Type: TokenWhitespace, Data: " "},
2427 {Type: TokenPunctuator, Data: ")"},
2428 {Type: TokenWhitespace, Data: " "},
2429 {Type: TokenPunctuator, Data: "{"},
2430 {Type: TokenWhitespace, Data: " "},
2431 {Type: TokenWord, Data: "b"},
2432 {Type: TokenPunctuator, Data: ";"},
2433 {Type: TokenWhitespace, Data: " "},
2434 {Type: TokenPunctuator, Data: "}"},
2435 {Type: parser.TokenDone, Data: ""},
2436 },
2437 },
2438 { // 145
2439 "function a() b",
2440 []parser.Token{
2441 {Type: TokenKeyword, Data: "function"},
2442 {Type: TokenWhitespace, Data: " "},
2443 {Type: TokenFunctionIdentifier, Data: "a"},
2444 {Type: TokenPunctuator, Data: "("},
2445 {Type: TokenPunctuator, Data: ")"},
2446 {Type: TokenWhitespace, Data: " "},
2447 {Type: parser.TokenError, Data: "invalid keyword"},
2448 },
2449 },
2450 { // 146
2451 "a(){ b; }",
2452 []parser.Token{
2453 {Type: TokenFunctionIdentifier, Data: "a"},
2454 {Type: TokenPunctuator, Data: "("},
2455 {Type: TokenPunctuator, Data: ")"},
2456 {Type: TokenPunctuator, Data: "{"},
2457 {Type: TokenWhitespace, Data: " "},
2458 {Type: TokenWord, Data: "b"},
2459 {Type: TokenPunctuator, Data: ";"},
2460 {Type: TokenWhitespace, Data: " "},
2461 {Type: TokenPunctuator, Data: "}"},
2462 {Type: parser.TokenDone, Data: ""},
2463 },
2464 },
2465 { // 147
2466 "a( ) { b; }",
2467 []parser.Token{
2468 {Type: TokenFunctionIdentifier, Data: "a"},
2469 {Type: TokenPunctuator, Data: "("},
2470 {Type: TokenWhitespace, Data: " "},
2471 {Type: TokenPunctuator, Data: ")"},
2472 {Type: TokenWhitespace, Data: " "},
2473 {Type: TokenPunctuator, Data: "{"},
2474 {Type: TokenWhitespace, Data: " "},
2475 {Type: TokenWord, Data: "b"},
2476 {Type: TokenPunctuator, Data: ";"},
2477 {Type: TokenWhitespace, Data: " "},
2478 {Type: TokenPunctuator, Data: "}"},
2479 {Type: parser.TokenDone, Data: ""},
2480 },
2481 },
2482 { // 148
2483 "a() b",
2484 []parser.Token{
2485 {Type: TokenFunctionIdentifier, Data: "a"},
2486 {Type: TokenPunctuator, Data: "("},
2487 {Type: TokenPunctuator, Data: ")"},
2488 {Type: TokenWhitespace, Data: " "},
2489 {Type: parser.TokenError, Data: "invalid keyword"},
2490 },
2491 },
2492 { // 149
2493 "a() b",
2494 []parser.Token{
2495 {Type: TokenFunctionIdentifier, Data: "a"},
2496 {Type: TokenPunctuator, Data: "("},
2497 {Type: TokenPunctuator, Data: ")"},
2498 {Type: TokenWhitespace, Data: " "},
2499 {Type: parser.TokenError, Data: "invalid keyword"},
2500 },
2501 },
2502 { // 150
2503 "[[ -f file ]]",
2504 []parser.Token{
2505 {Type: TokenKeyword, Data: "[["},
2506 {Type: TokenWhitespace, Data: " "},
2507 {Type: TokenKeyword, Data: "-f"},
2508 {Type: TokenWhitespace, Data: " "},
2509 {Type: TokenWord, Data: "file"},
2510 {Type: TokenWhitespace, Data: " "},
2511 {Type: TokenKeyword, Data: "]]"},
2512 {Type: parser.TokenDone, Data: ""},
2513 },
2514 },
2515 { // 151
2516 "[[ ! -e file\"str\" ]]",
2517 []parser.Token{
2518 {Type: TokenKeyword, Data: "[["},
2519 {Type: TokenWhitespace, Data: " "},
2520 {Type: TokenPunctuator, Data: "!"},
2521 {Type: TokenWhitespace, Data: " "},
2522 {Type: TokenKeyword, Data: "-e"},
2523 {Type: TokenWhitespace, Data: " "},
2524 {Type: TokenWord, Data: "file"},
2525 {Type: TokenString, Data: "\"str\""},
2526 {Type: TokenWhitespace, Data: " "},
2527 {Type: TokenKeyword, Data: "]]"},
2528 {Type: parser.TokenDone, Data: ""},
2529 },
2530 },
2531 { // 152
2532 "[[ -S \"str\"a || -g $b\"c\"d ]]",
2533 []parser.Token{
2534 {Type: TokenKeyword, Data: "[["},
2535 {Type: TokenWhitespace, Data: " "},
2536 {Type: TokenKeyword, Data: "-S"},
2537 {Type: TokenWhitespace, Data: " "},
2538 {Type: TokenString, Data: "\"str\""},
2539 {Type: TokenWord, Data: "a"},
2540 {Type: TokenWhitespace, Data: " "},
2541 {Type: TokenPunctuator, Data: "||"},
2542 {Type: TokenWhitespace, Data: " "},
2543 {Type: TokenKeyword, Data: "-g"},
2544 {Type: TokenWhitespace, Data: " "},
2545 {Type: TokenIdentifier, Data: "$b"},
2546 {Type: TokenString, Data: "\"c\""},
2547 {Type: TokenWord, Data: "d"},
2548 {Type: TokenWhitespace, Data: " "},
2549 {Type: TokenKeyword, Data: "]]"},
2550 {Type: parser.TokenDone, Data: ""},
2551 },
2552 },
2553 { // 153
2554 "[[ a = b ]]",
2555 []parser.Token{
2556 {Type: TokenKeyword, Data: "[["},
2557 {Type: TokenWhitespace, Data: " "},
2558 {Type: TokenWord, Data: "a"},
2559 {Type: TokenWhitespace, Data: " "},
2560 {Type: TokenBinaryOperator, Data: "="},
2561 {Type: TokenWhitespace, Data: " "},
2562 {Type: TokenPattern, Data: "b"},
2563 {Type: TokenWhitespace, Data: " "},
2564 {Type: TokenKeyword, Data: "]]"},
2565 {Type: parser.TokenDone, Data: ""},
2566 },
2567 },
2568 { // 154
2569 "[[ a$b = c\"d\" && e\"f\"g != \"h\"$i ]]",
2570 []parser.Token{
2571 {Type: TokenKeyword, Data: "[["},
2572 {Type: TokenWhitespace, Data: " "},
2573 {Type: TokenWord, Data: "a"},
2574 {Type: TokenIdentifier, Data: "$b"},
2575 {Type: TokenWhitespace, Data: " "},
2576 {Type: TokenBinaryOperator, Data: "="},
2577 {Type: TokenWhitespace, Data: " "},
2578 {Type: TokenPattern, Data: "c"},
2579 {Type: TokenString, Data: "\"d\""},
2580 {Type: TokenWhitespace, Data: " "},
2581 {Type: TokenPunctuator, Data: "&&"},
2582 {Type: TokenWhitespace, Data: " "},
2583 {Type: TokenWord, Data: "e"},
2584 {Type: TokenString, Data: "\"f\""},
2585 {Type: TokenWord, Data: "g"},
2586 {Type: TokenWhitespace, Data: " "},
2587 {Type: TokenBinaryOperator, Data: "!="},
2588 {Type: TokenWhitespace, Data: " "},
2589 {Type: TokenString, Data: "\"h\""},
2590 {Type: TokenIdentifier, Data: "$i"},
2591 {Type: TokenWhitespace, Data: " "},
2592 {Type: TokenKeyword, Data: "]]"},
2593 {Type: parser.TokenDone, Data: ""},
2594 },
2595 },
2596 { // 155
2597 "[[ a -gt b ]]",
2598 []parser.Token{
2599 {Type: TokenKeyword, Data: "[["},
2600 {Type: TokenWhitespace, Data: " "},
2601 {Type: TokenWord, Data: "a"},
2602 {Type: TokenWhitespace, Data: " "},
2603 {Type: TokenKeyword, Data: "-gt"},
2604 {Type: TokenWhitespace, Data: " "},
2605 {Type: TokenWord, Data: "b"},
2606 {Type: TokenWhitespace, Data: " "},
2607 {Type: TokenKeyword, Data: "]]"},
2608 {Type: parser.TokenDone, Data: ""},
2609 },
2610 },
2611 { // 156
2612 "[[ # A\n# B\n\n# C\na -gt b # D\n]]",
2613 []parser.Token{
2614 {Type: TokenKeyword, Data: "[["},
2615 {Type: TokenWhitespace, Data: " "},
2616 {Type: TokenComment, Data: "# A"},
2617 {Type: TokenLineTerminator, Data: "\n"},
2618 {Type: TokenComment, Data: "# B"},
2619 {Type: TokenLineTerminator, Data: "\n\n"},
2620 {Type: TokenComment, Data: "# C"},
2621 {Type: TokenLineTerminator, Data: "\n"},
2622 {Type: TokenWord, Data: "a"},
2623 {Type: TokenWhitespace, Data: " "},
2624 {Type: TokenKeyword, Data: "-gt"},
2625 {Type: TokenWhitespace, Data: " "},
2626 {Type: TokenWord, Data: "b"},
2627 {Type: TokenWhitespace, Data: " "},
2628 {Type: TokenComment, Data: "# D"},
2629 {Type: TokenLineTerminator, Data: "\n"},
2630 {Type: TokenKeyword, Data: "]]"},
2631 {Type: parser.TokenDone, Data: ""},
2632 },
2633 },
2634 { // 157
2635 "[[ a$b -eq c\"d\" && e\"f\"g -ne \"h\"$i ]]",
2636 []parser.Token{
2637 {Type: TokenKeyword, Data: "[["},
2638 {Type: TokenWhitespace, Data: " "},
2639 {Type: TokenWord, Data: "a"},
2640 {Type: TokenIdentifier, Data: "$b"},
2641 {Type: TokenWhitespace, Data: " "},
2642 {Type: TokenKeyword, Data: "-eq"},
2643 {Type: TokenWhitespace, Data: " "},
2644 {Type: TokenWord, Data: "c"},
2645 {Type: TokenString, Data: "\"d\""},
2646 {Type: TokenWhitespace, Data: " "},
2647 {Type: TokenPunctuator, Data: "&&"},
2648 {Type: TokenWhitespace, Data: " "},
2649 {Type: TokenWord, Data: "e"},
2650 {Type: TokenString, Data: "\"f\""},
2651 {Type: TokenWord, Data: "g"},
2652 {Type: TokenWhitespace, Data: " "},
2653 {Type: TokenKeyword, Data: "-ne"},
2654 {Type: TokenWhitespace, Data: " "},
2655 {Type: TokenString, Data: "\"h\""},
2656 {Type: TokenIdentifier, Data: "$i"},
2657 {Type: TokenWhitespace, Data: " "},
2658 {Type: TokenKeyword, Data: "]]"},
2659 {Type: parser.TokenDone, Data: ""},
2660 },
2661 },
2662 { // 158
2663 "[[ (a = b || c = d) && $e -le $f ]]",
2664 []parser.Token{
2665 {Type: TokenKeyword, Data: "[["},
2666 {Type: TokenWhitespace, Data: " "},
2667 {Type: TokenPunctuator, Data: "("},
2668 {Type: TokenWord, Data: "a"},
2669 {Type: TokenWhitespace, Data: " "},
2670 {Type: TokenBinaryOperator, Data: "="},
2671 {Type: TokenWhitespace, Data: " "},
2672 {Type: TokenPattern, Data: "b"},
2673 {Type: TokenWhitespace, Data: " "},
2674 {Type: TokenPunctuator, Data: "||"},
2675 {Type: TokenWhitespace, Data: " "},
2676 {Type: TokenWord, Data: "c"},
2677 {Type: TokenWhitespace, Data: " "},
2678 {Type: TokenBinaryOperator, Data: "="},
2679 {Type: TokenWhitespace, Data: " "},
2680 {Type: TokenPattern, Data: "d"},
2681 {Type: TokenPunctuator, Data: ")"},
2682 {Type: TokenWhitespace, Data: " "},
2683 {Type: TokenPunctuator, Data: "&&"},
2684 {Type: TokenWhitespace, Data: " "},
2685 {Type: TokenIdentifier, Data: "$e"},
2686 {Type: TokenWhitespace, Data: " "},
2687 {Type: TokenKeyword, Data: "-le"},
2688 {Type: TokenWhitespace, Data: " "},
2689 {Type: TokenIdentifier, Data: "$f"},
2690 {Type: TokenWhitespace, Data: " "},
2691 {Type: TokenKeyword, Data: "]]"},
2692 {Type: parser.TokenDone, Data: ""},
2693 },
2694 },
2695 { // 159
2696 "[[ (a=b) ]]",
2697 []parser.Token{
2698 {Type: TokenKeyword, Data: "[["},
2699 {Type: TokenWhitespace, Data: " "},
2700 {Type: TokenPunctuator, Data: "("},
2701 {Type: TokenWord, Data: "a=b"},
2702 {Type: TokenPunctuator, Data: ")"},
2703 {Type: TokenWhitespace, Data: " "},
2704 {Type: TokenKeyword, Data: "]]"},
2705 {Type: parser.TokenDone, Data: ""},
2706 },
2707 },
2708 { // 160
2709 "[[ a < b ]]",
2710 []parser.Token{
2711 {Type: TokenKeyword, Data: "[["},
2712 {Type: TokenWhitespace, Data: " "},
2713 {Type: TokenWord, Data: "a"},
2714 {Type: TokenWhitespace, Data: " "},
2715 {Type: TokenBinaryOperator, Data: "<"},
2716 {Type: TokenWhitespace, Data: " "},
2717 {Type: TokenPattern, Data: "b"},
2718 {Type: TokenWhitespace, Data: " "},
2719 {Type: TokenKeyword, Data: "]]"},
2720 {Type: parser.TokenDone, Data: ""},
2721 },
2722 },
2723 { // 161
2724 "[[ a<b ]]",
2725 []parser.Token{
2726 {Type: TokenKeyword, Data: "[["},
2727 {Type: TokenWhitespace, Data: " "},
2728 {Type: TokenWord, Data: "a"},
2729 {Type: TokenBinaryOperator, Data: "<"},
2730 {Type: TokenPattern, Data: "b"},
2731 {Type: TokenWhitespace, Data: " "},
2732 {Type: TokenKeyword, Data: "]]"},
2733 {Type: parser.TokenDone, Data: ""},
2734 },
2735 },
2736 { // 162
2737 "[[ (a = b) ]]",
2738 []parser.Token{
2739 {Type: TokenKeyword, Data: "[["},
2740 {Type: TokenWhitespace, Data: " "},
2741 {Type: TokenPunctuator, Data: "("},
2742 {Type: TokenWord, Data: "a"},
2743 {Type: TokenWhitespace, Data: " "},
2744 {Type: TokenBinaryOperator, Data: "="},
2745 {Type: TokenWhitespace, Data: " "},
2746 {Type: TokenPattern, Data: "b"},
2747 {Type: TokenPunctuator, Data: ")"},
2748 {Type: TokenWhitespace, Data: " "},
2749 {Type: TokenKeyword, Data: "]]"},
2750 {Type: parser.TokenDone, Data: ""},
2751 },
2752 },
2753 { // 163
2754 "[[ (a -gt b) ]]",
2755 []parser.Token{
2756 {Type: TokenKeyword, Data: "[["},
2757 {Type: TokenWhitespace, Data: " "},
2758 {Type: TokenPunctuator, Data: "("},
2759 {Type: TokenWord, Data: "a"},
2760 {Type: TokenWhitespace, Data: " "},
2761 {Type: TokenKeyword, Data: "-gt"},
2762 {Type: TokenWhitespace, Data: " "},
2763 {Type: TokenWord, Data: "b"},
2764 {Type: TokenPunctuator, Data: ")"},
2765 {Type: TokenWhitespace, Data: " "},
2766 {Type: TokenKeyword, Data: "]]"},
2767 {Type: parser.TokenDone, Data: ""},
2768 },
2769 },
2770 { // 164
2771 "[[\na\n=\nb\n]]",
2772 []parser.Token{
2773 {Type: TokenKeyword, Data: "[["},
2774 {Type: TokenLineTerminator, Data: "\n"},
2775 {Type: TokenWord, Data: "a"},
2776 {Type: TokenLineTerminator, Data: "\n"},
2777 {Type: TokenBinaryOperator, Data: "="},
2778 {Type: TokenLineTerminator, Data: "\n"},
2779 {Type: TokenPattern, Data: "b"},
2780 {Type: TokenLineTerminator, Data: "\n"},
2781 {Type: TokenKeyword, Data: "]]"},
2782 {Type: parser.TokenDone, Data: ""},
2783 },
2784 },
2785 { // 165
2786 "[[\n(a=b)\n]]",
2787 []parser.Token{
2788 {Type: TokenKeyword, Data: "[["},
2789 {Type: TokenLineTerminator, Data: "\n"},
2790 {Type: TokenPunctuator, Data: "("},
2791 {Type: TokenWord, Data: "a=b"},
2792 {Type: TokenPunctuator, Data: ")"},
2793 {Type: TokenLineTerminator, Data: "\n"},
2794 {Type: TokenKeyword, Data: "]]"},
2795 {Type: parser.TokenDone, Data: ""},
2796 },
2797 },
2798 { // 166
2799 "[[ ",
2800 []parser.Token{
2801 {Type: TokenKeyword, Data: "[["},
2802 {Type: TokenWhitespace, Data: " "},
2803 {Type: parser.TokenError, Data: "unexpected EOF"},
2804 },
2805 },
2806 { // 167
2807 "[[ | = b ]]",
2808 []parser.Token{
2809 {Type: TokenKeyword, Data: "[["},
2810 {Type: TokenWhitespace, Data: " "},
2811 {Type: parser.TokenError, Data: "invalid character"},
2812 },
2813 },
2814 { // 168
2815 "[[ & = b ]]",
2816 []parser.Token{
2817 {Type: TokenKeyword, Data: "[["},
2818 {Type: TokenWhitespace, Data: " "},
2819 {Type: parser.TokenError, Data: "invalid character"},
2820 },
2821 },
2822 { // 169
2823 "[[ \"a\" = b ]]",
2824 []parser.Token{
2825 {Type: TokenKeyword, Data: "[["},
2826 {Type: TokenWhitespace, Data: " "},
2827 {Type: TokenString, Data: "\"a\""},
2828 {Type: TokenWhitespace, Data: " "},
2829 {Type: TokenBinaryOperator, Data: "="},
2830 {Type: TokenWhitespace, Data: " "},
2831 {Type: TokenPattern, Data: "b"},
2832 {Type: TokenWhitespace, Data: " "},
2833 {Type: TokenKeyword, Data: "]]"},
2834 {Type: parser.TokenDone, Data: ""},
2835 },
2836 },
2837 { // 170
2838 "[[ ]]a = ]]b ]]",
2839 []parser.Token{
2840 {Type: TokenKeyword, Data: "[["},
2841 {Type: TokenWhitespace, Data: " "},
2842 {Type: TokenWord, Data: "]]a"},
2843 {Type: TokenWhitespace, Data: " "},
2844 {Type: TokenBinaryOperator, Data: "="},
2845 {Type: TokenWhitespace, Data: " "},
2846 {Type: TokenPattern, Data: "]]b"},
2847 {Type: TokenWhitespace, Data: " "},
2848 {Type: TokenKeyword, Data: "]]"},
2849 {Type: parser.TokenDone, Data: ""},
2850 },
2851 },
2852 { // 171
2853 "[[ ( a = ]]b ) ]]",
2854 []parser.Token{
2855 {Type: TokenKeyword, Data: "[["},
2856 {Type: TokenWhitespace, Data: " "},
2857 {Type: TokenPunctuator, Data: "("},
2858 {Type: TokenWhitespace, Data: " "},
2859 {Type: TokenWord, Data: "a"},
2860 {Type: TokenWhitespace, Data: " "},
2861 {Type: TokenBinaryOperator, Data: "="},
2862 {Type: TokenWhitespace, Data: " "},
2863 {Type: TokenPattern, Data: "]]b"},
2864 {Type: TokenWhitespace, Data: " "},
2865 {Type: TokenPunctuator, Data: ")"},
2866 {Type: TokenWhitespace, Data: " "},
2867 {Type: TokenKeyword, Data: "]]"},
2868 {Type: parser.TokenDone, Data: ""},
2869 },
2870 },
2871 { // 172
2872 "[[ ) = ) ]]",
2873 []parser.Token{
2874 {Type: TokenKeyword, Data: "[["},
2875 {Type: TokenWhitespace, Data: " "},
2876 {Type: parser.TokenError, Data: "invalid character"},
2877 },
2878 },
2879 { // 173
2880 "[[ ( ]] ) ]]",
2881 []parser.Token{
2882 {Type: TokenKeyword, Data: "[["},
2883 {Type: TokenWhitespace, Data: " "},
2884 {Type: TokenPunctuator, Data: "("},
2885 {Type: TokenWhitespace, Data: " "},
2886 {Type: parser.TokenError, Data: "invalid character"},
2887 },
2888 },
2889 { // 174
2890 "[[ a \n= b ]]",
2891 []parser.Token{
2892 {Type: TokenKeyword, Data: "[["},
2893 {Type: TokenWhitespace, Data: " "},
2894 {Type: TokenWord, Data: "a"},
2895 {Type: TokenWhitespace, Data: " "},
2896 {Type: TokenLineTerminator, Data: "\n"},
2897 {Type: TokenBinaryOperator, Data: "="},
2898 {Type: TokenWhitespace, Data: " "},
2899 {Type: TokenPattern, Data: "b"},
2900 {Type: TokenWhitespace, Data: " "},
2901 {Type: TokenKeyword, Data: "]]"},
2902 {Type: parser.TokenDone, Data: ""},
2903 },
2904 },
2905 { // 175
2906 "[[ a\n = b ]]",
2907 []parser.Token{
2908 {Type: TokenKeyword, Data: "[["},
2909 {Type: TokenWhitespace, Data: " "},
2910 {Type: TokenWord, Data: "a"},
2911 {Type: TokenLineTerminator, Data: "\n"},
2912 {Type: TokenWhitespace, Data: " "},
2913 {Type: TokenBinaryOperator, Data: "="},
2914 {Type: TokenWhitespace, Data: " "},
2915 {Type: TokenPattern, Data: "b"},
2916 {Type: TokenWhitespace, Data: " "},
2917 {Type: TokenKeyword, Data: "]]"},
2918 {Type: parser.TokenDone, Data: ""},
2919 },
2920 },
2921 { // 176
2922 "[[ a ",
2923 []parser.Token{
2924 {Type: TokenKeyword, Data: "[["},
2925 {Type: TokenWhitespace, Data: " "},
2926 {Type: TokenWord, Data: "a"},
2927 {Type: TokenWhitespace, Data: " "},
2928 {Type: parser.TokenError, Data: "unexpected EOF"},
2929 },
2930 },
2931 { // 177
2932 "[[ a ! b ]]",
2933 []parser.Token{
2934 {Type: TokenKeyword, Data: "[["},
2935 {Type: TokenWhitespace, Data: " "},
2936 {Type: TokenWord, Data: "a"},
2937 {Type: TokenWhitespace, Data: " "},
2938 {Type: parser.TokenError, Data: "invalid character"},
2939 },
2940 },
2941 { // 178
2942 "[[ a -ez b ]]",
2943 []parser.Token{
2944 {Type: TokenKeyword, Data: "[["},
2945 {Type: TokenWhitespace, Data: " "},
2946 {Type: TokenWord, Data: "a"},
2947 {Type: TokenWhitespace, Data: " "},
2948 {Type: parser.TokenError, Data: "invalid character"},
2949 },
2950 },
2951 { // 179
2952 "[[ a -nz b ]]",
2953 []parser.Token{
2954 {Type: TokenKeyword, Data: "[["},
2955 {Type: TokenWhitespace, Data: " "},
2956 {Type: TokenWord, Data: "a"},
2957 {Type: TokenWhitespace, Data: " "},
2958 {Type: parser.TokenError, Data: "invalid character"},
2959 },
2960 },
2961 { // 180
2962 "[[ a -gz b ]]",
2963 []parser.Token{
2964 {Type: TokenKeyword, Data: "[["},
2965 {Type: TokenWhitespace, Data: " "},
2966 {Type: TokenWord, Data: "a"},
2967 {Type: TokenWhitespace, Data: " "},
2968 {Type: parser.TokenError, Data: "invalid character"},
2969 },
2970 },
2971 { // 181
2972 "[[ a -lz b ]]",
2973 []parser.Token{
2974 {Type: TokenKeyword, Data: "[["},
2975 {Type: TokenWhitespace, Data: " "},
2976 {Type: TokenWord, Data: "a"},
2977 {Type: TokenWhitespace, Data: " "},
2978 {Type: parser.TokenError, Data: "invalid character"},
2979 },
2980 },
2981 { // 182
2982 "[[ a -oz b ]]",
2983 []parser.Token{
2984 {Type: TokenKeyword, Data: "[["},
2985 {Type: TokenWhitespace, Data: " "},
2986 {Type: TokenWord, Data: "a"},
2987 {Type: TokenWhitespace, Data: " "},
2988 {Type: parser.TokenError, Data: "invalid character"},
2989 },
2990 },
2991 { // 183
2992 "[[ a -z b ]]",
2993 []parser.Token{
2994 {Type: TokenKeyword, Data: "[["},
2995 {Type: TokenWhitespace, Data: " "},
2996 {Type: TokenWord, Data: "a"},
2997 {Type: TokenWhitespace, Data: " "},
2998 {Type: parser.TokenError, Data: "invalid character"},
2999 },
3000 },
3001 { // 184
3002 "[[ -z < ]]",
3003 []parser.Token{
3004 {Type: TokenKeyword, Data: "[["},
3005 {Type: TokenWhitespace, Data: " "},
3006 {Type: TokenKeyword, Data: "-z"},
3007 {Type: TokenWhitespace, Data: " "},
3008 {Type: parser.TokenError, Data: "invalid character"},
3009 },
3010 },
3011 { // 185
3012 "[[ -z \n a ]]",
3013 []parser.Token{
3014 {Type: TokenKeyword, Data: "[["},
3015 {Type: TokenWhitespace, Data: " "},
3016 {Type: TokenKeyword, Data: "-z"},
3017 {Type: TokenWhitespace, Data: " "},
3018 {Type: TokenLineTerminator, Data: "\n"},
3019 {Type: TokenWhitespace, Data: " "},
3020 {Type: TokenWord, Data: "a"},
3021 {Type: TokenWhitespace, Data: " "},
3022 {Type: TokenKeyword, Data: "]]"},
3023 {Type: parser.TokenDone, Data: ""},
3024 },
3025 },
3026 { // 186
3027 "[[ a = b\\nc ]]",
3028 []parser.Token{
3029 {Type: TokenKeyword, Data: "[["},
3030 {Type: TokenWhitespace, Data: " "},
3031 {Type: TokenWord, Data: "a"},
3032 {Type: TokenWhitespace, Data: " "},
3033 {Type: TokenBinaryOperator, Data: "="},
3034 {Type: TokenWhitespace, Data: " "},
3035 {Type: TokenPattern, Data: "b\\nc"},
3036 {Type: TokenWhitespace, Data: " "},
3037 {Type: TokenKeyword, Data: "]]"},
3038 {Type: parser.TokenDone, Data: ""},
3039 },
3040 },
3041 { // 187
3042 "[[ a = b",
3043 []parser.Token{
3044 {Type: TokenKeyword, Data: "[["},
3045 {Type: TokenWhitespace, Data: " "},
3046 {Type: TokenWord, Data: "a"},
3047 {Type: TokenWhitespace, Data: " "},
3048 {Type: TokenBinaryOperator, Data: "="},
3049 {Type: TokenWhitespace, Data: " "},
3050 {Type: parser.TokenError, Data: "unexpected EOF"},
3051 },
3052 },
3053 { // 188
3054 "[[ -z `a` ]]",
3055 []parser.Token{
3056 {Type: TokenKeyword, Data: "[["},
3057 {Type: TokenWhitespace, Data: " "},
3058 {Type: TokenKeyword, Data: "-z"},
3059 {Type: TokenWhitespace, Data: " "},
3060 {Type: TokenOpenBacktick, Data: "`"},
3061 {Type: TokenWord, Data: "a"},
3062 {Type: TokenCloseBacktick, Data: "`"},
3063 {Type: TokenWhitespace, Data: " "},
3064 {Type: TokenKeyword, Data: "]]"},
3065 {Type: parser.TokenDone, Data: ""},
3066 },
3067 },
3068 { // 189
3069 "[[ -z | ]]",
3070 []parser.Token{
3071 {Type: TokenKeyword, Data: "[["},
3072 {Type: TokenWhitespace, Data: " "},
3073 {Type: TokenKeyword, Data: "-z"},
3074 {Type: TokenWhitespace, Data: " "},
3075 {Type: parser.TokenError, Data: "invalid character"},
3076 },
3077 },
3078 { // 190
3079 "[[ -z a",
3080 []parser.Token{
3081 {Type: TokenKeyword, Data: "[["},
3082 {Type: TokenWhitespace, Data: " "},
3083 {Type: TokenKeyword, Data: "-z"},
3084 {Type: TokenWhitespace, Data: " "},
3085 {Type: TokenWord, Data: "a"},
3086 {Type: parser.TokenError, Data: "unexpected EOF"},
3087 },
3088 },
3089 { // 191
3090 "[[ -z ",
3091 []parser.Token{
3092 {Type: TokenKeyword, Data: "[["},
3093 {Type: TokenWhitespace, Data: " "},
3094 {Type: TokenKeyword, Data: "-z"},
3095 {Type: TokenWhitespace, Data: " "},
3096 {Type: parser.TokenError, Data: "unexpected EOF"},
3097 },
3098 },
3099 { // 192
3100 "[[ -z #comment\na ]]",
3101 []parser.Token{
3102 {Type: TokenKeyword, Data: "[["},
3103 {Type: TokenWhitespace, Data: " "},
3104 {Type: TokenKeyword, Data: "-z"},
3105 {Type: TokenWhitespace, Data: " "},
3106 {Type: parser.TokenError, Data: "invalid character"},
3107 },
3108 },
3109 { // 193
3110 "[[ a -eq #comment\nb ]]",
3111 []parser.Token{
3112 {Type: TokenKeyword, Data: "[["},
3113 {Type: TokenWhitespace, Data: " "},
3114 {Type: TokenWord, Data: "a"},
3115 {Type: TokenWhitespace, Data: " "},
3116 {Type: TokenKeyword, Data: "-eq"},
3117 {Type: TokenWhitespace, Data: " "},
3118 {Type: parser.TokenError, Data: "invalid character"},
3119 },
3120 },
3121 { // 194
3122 "[[ #comment\na = b ]]",
3123 []parser.Token{
3124 {Type: TokenKeyword, Data: "[["},
3125 {Type: TokenWhitespace, Data: " "},
3126 {Type: TokenComment, Data: "#comment"},
3127 {Type: TokenLineTerminator, Data: "\n"},
3128 {Type: TokenWord, Data: "a"},
3129 {Type: TokenWhitespace, Data: " "},
3130 {Type: TokenBinaryOperator, Data: "="},
3131 {Type: TokenWhitespace, Data: " "},
3132 {Type: TokenPattern, Data: "b"},
3133 {Type: TokenWhitespace, Data: " "},
3134 {Type: TokenKeyword, Data: "]]"},
3135 {Type: parser.TokenDone, Data: ""},
3136 },
3137 },
3138 { // 195
3139 "[[ ( a = b ) #comment\n ]]",
3140 []parser.Token{
3141 {Type: TokenKeyword, Data: "[["},
3142 {Type: TokenWhitespace, Data: " "},
3143 {Type: TokenPunctuator, Data: "("},
3144 {Type: TokenWhitespace, Data: " "},
3145 {Type: TokenWord, Data: "a"},
3146 {Type: TokenWhitespace, Data: " "},
3147 {Type: TokenBinaryOperator, Data: "="},
3148 {Type: TokenWhitespace, Data: " "},
3149 {Type: TokenPattern, Data: "b"},
3150 {Type: TokenWhitespace, Data: " "},
3151 {Type: TokenPunctuator, Data: ")"},
3152 {Type: TokenWhitespace, Data: " "},
3153 {Type: TokenComment, Data: "#comment"},
3154 {Type: TokenLineTerminator, Data: "\n"},
3155 {Type: TokenWhitespace, Data: " "},
3156 {Type: TokenKeyword, Data: "]]"},
3157 {Type: parser.TokenDone, Data: ""},
3158 },
3159 },
3160 { // 196
3161 "[[ a = #comment\nb ]]",
3162 []parser.Token{
3163 {Type: TokenKeyword, Data: "[["},
3164 {Type: TokenWhitespace, Data: " "},
3165 {Type: TokenWord, Data: "a"},
3166 {Type: TokenWhitespace, Data: " "},
3167 {Type: TokenBinaryOperator, Data: "="},
3168 {Type: TokenWhitespace, Data: " "},
3169 {Type: parser.TokenError, Data: "invalid character"},
3170 },
3171 },
3172 { // 197
3173 "[[ a -net b ]]",
3174 []parser.Token{
3175 {Type: TokenKeyword, Data: "[["},
3176 {Type: TokenWhitespace, Data: " "},
3177 {Type: TokenWord, Data: "a"},
3178 {Type: TokenWhitespace, Data: " "},
3179 {Type: parser.TokenError, Data: "invalid operator"},
3180 },
3181 },
3182 { // 198
3183 "[[ -f file ]]\n[[ ! -e file\"str\" ]];[[ -S \"str\"a",
3184 []parser.Token{
3185 {Type: TokenKeyword, Data: "[["},
3186 {Type: TokenWhitespace, Data: " "},
3187 {Type: TokenKeyword, Data: "-f"},
3188 {Type: TokenWhitespace, Data: " "},
3189 {Type: TokenWord, Data: "file"},
3190 {Type: TokenWhitespace, Data: " "},
3191 {Type: TokenKeyword, Data: "]]"},
3192 {Type: TokenLineTerminator, Data: "\n"},
3193 {Type: TokenKeyword, Data: "[["},
3194 {Type: TokenWhitespace, Data: " "},
3195 {Type: TokenPunctuator, Data: "!"},
3196 {Type: TokenWhitespace, Data: " "},
3197 {Type: TokenKeyword, Data: "-e"},
3198 {Type: TokenWhitespace, Data: " "},
3199 {Type: TokenWord, Data: "file"},
3200 {Type: TokenString, Data: "\"str\""},
3201 {Type: TokenWhitespace, Data: " "},
3202 {Type: TokenKeyword, Data: "]]"},
3203 {Type: TokenPunctuator, Data: ";"},
3204 {Type: TokenKeyword, Data: "[["},
3205 {Type: TokenWhitespace, Data: " "},
3206 {Type: TokenKeyword, Data: "-S"},
3207 {Type: TokenWhitespace, Data: " "},
3208 {Type: TokenString, Data: "\"str\""},
3209 {Type: TokenWord, Data: "a"},
3210 {Type: parser.TokenError, Data: "unexpected EOF"},
3211 },
3212 },
3213 { // 199
3214 "\"",
3215 []parser.Token{
3216 {Type: parser.TokenError, Data: "unexpected EOF"},
3217 },
3218 },
3219 { // 200
3220 "$((",
3221 []parser.Token{
3222 {Type: TokenPunctuator, Data: "$(("},
3223 {Type: parser.TokenError, Data: "unexpected EOF"},
3224 },
3225 },
3226 { // 201
3227 "$(( \"1\" ))",
3228 []parser.Token{
3229 {Type: TokenPunctuator, Data: "$(("},
3230 {Type: TokenWhitespace, Data: " "},
3231 {Type: TokenString, Data: "\"1\""},
3232 {Type: TokenWhitespace, Data: " "},
3233 {Type: TokenPunctuator, Data: "))"},
3234 {Type: parser.TokenDone, Data: ""},
3235 },
3236 },
3237 { // 202
3238 "$(( : ))",
3239 []parser.Token{
3240 {Type: TokenPunctuator, Data: "$(("},
3241 {Type: TokenWhitespace, Data: " "},
3242 {Type: parser.TokenError, Data: "invalid character"},
3243 },
3244 },
3245 { // 203
3246 "$(( ; ))",
3247 []parser.Token{
3248 {Type: TokenPunctuator, Data: "$(("},
3249 {Type: TokenWhitespace, Data: " "},
3250 {Type: parser.TokenError, Data: "invalid character"},
3251 },
3252 },
3253 { // 204
3254 "`\\",
3255 []parser.Token{
3256 {Type: TokenOpenBacktick, Data: "`"},
3257 {Type: parser.TokenError, Data: "incorrect backtick depth"},
3258 },
3259 },
3260 { // 205
3261 "<<",
3262 []parser.Token{
3263 {Type: TokenPunctuator, Data: "<<"},
3264 {Type: parser.TokenError, Data: "unexpected EOF"},
3265 },
3266 },
3267 { // 206
3268 "<<a",
3269 []parser.Token{
3270 {Type: TokenPunctuator, Data: "<<"},
3271 {Type: parser.TokenError, Data: "unexpected EOF"},
3272 },
3273 },
3274 { // 207
3275 "<<a\\n\\tc\n123\na\n\tc",
3276 []parser.Token{
3277 {Type: TokenPunctuator, Data: "<<"},
3278 {Type: TokenWord, Data: "a\\n\\tc"},
3279 {Type: TokenLineTerminator, Data: "\n"},
3280 {Type: TokenHeredoc, Data: "123\n"},
3281 {Type: TokenHeredocEnd, Data: "a\n\tc"},
3282 {Type: parser.TokenDone, Data: ""},
3283 },
3284 },
3285 { // 208
3286 "<<abc\n123",
3287 []parser.Token{
3288 {Type: TokenPunctuator, Data: "<<"},
3289 {Type: TokenWord, Data: "abc"},
3290 {Type: TokenLineTerminator, Data: "\n"},
3291 {Type: parser.TokenError, Data: "unexpected EOF"},
3292 },
3293 },
3294 { // 209
3295 "<<abc\n123$\nabc",
3296 []parser.Token{
3297 {Type: TokenPunctuator, Data: "<<"},
3298 {Type: TokenWord, Data: "abc"},
3299 {Type: TokenLineTerminator, Data: "\n"},
3300 {Type: TokenHeredoc, Data: "123$\n"},
3301 {Type: TokenHeredocEnd, Data: "abc"},
3302 {Type: parser.TokenDone, Data: ""},
3303 },
3304 },
3305 { // 210
3306 "${a!}",
3307 []parser.Token{
3308 {Type: TokenPunctuator, Data: "${"},
3309 {Type: TokenIdentifier, Data: "a"},
3310 {Type: parser.TokenError, Data: "invalid parameter expansion"},
3311 },
3312 },
3313 { // 211
3314 "${a:b}",
3315 []parser.Token{
3316 {Type: TokenPunctuator, Data: "${"},
3317 {Type: TokenIdentifier, Data: "a"},
3318 {Type: TokenPunctuator, Data: ":"},
3319 {Type: parser.TokenError, Data: "invalid parameter expansion"},
3320 },
3321 },
3322 { // 212
3323 "${a:1:b}",
3324 []parser.Token{
3325 {Type: TokenPunctuator, Data: "${"},
3326 {Type: TokenIdentifier, Data: "a"},
3327 {Type: TokenPunctuator, Data: ":"},
3328 {Type: TokenNumberLiteral, Data: "1"},
3329 {Type: TokenPunctuator, Data: ":"},
3330 {Type: parser.TokenError, Data: "invalid parameter expansion"},
3331 },
3332 },
3333 { // 213
3334 "${a/(}",
3335 []parser.Token{
3336 {Type: TokenPunctuator, Data: "${"},
3337 {Type: TokenIdentifier, Data: "a"},
3338 {Type: TokenPunctuator, Data: "/"},
3339 {Type: parser.TokenError, Data: "invalid character"},
3340 },
3341 },
3342 { // 214
3343 "${a/",
3344 []parser.Token{
3345 {Type: TokenPunctuator, Data: "${"},
3346 {Type: TokenIdentifier, Data: "a"},
3347 {Type: TokenPunctuator, Data: "/"},
3348 {Type: parser.TokenError, Data: "unexpected EOF"},
3349 },
3350 },
3351 { // 215
3352 "${a/)}",
3353 []parser.Token{
3354 {Type: TokenPunctuator, Data: "${"},
3355 {Type: TokenIdentifier, Data: "a"},
3356 {Type: TokenPunctuator, Data: "/"},
3357 {Type: parser.TokenError, Data: "invalid character"},
3358 },
3359 },
3360 { // 216
3361 "${a/b[\\t]+/c}",
3362 []parser.Token{
3363 {Type: TokenPunctuator, Data: "${"},
3364 {Type: TokenIdentifier, Data: "a"},
3365 {Type: TokenPunctuator, Data: "/"},
3366 {Type: TokenPattern, Data: "b[\\t]+"},
3367 {Type: TokenPunctuator, Data: "/"},
3368 {Type: TokenWord, Data: "c"},
3369 {Type: TokenPunctuator, Data: "}"},
3370 {Type: parser.TokenDone, Data: ""},
3371 },
3372 },
3373 { // 217
3374 "${a-b}",
3375 []parser.Token{
3376 {Type: TokenPunctuator, Data: "${"},
3377 {Type: TokenIdentifier, Data: "a"},
3378 {Type: TokenPunctuator, Data: "-"},
3379 {Type: TokenWord, Data: "b"},
3380 {Type: TokenPunctuator, Data: "}"},
3381 {Type: parser.TokenDone, Data: ""},
3382 },
3383 },
3384 { // 218
3385 "${a+b}",
3386 []parser.Token{
3387 {Type: TokenPunctuator, Data: "${"},
3388 {Type: TokenIdentifier, Data: "a"},
3389 {Type: TokenPunctuator, Data: "+"},
3390 {Type: TokenWord, Data: "b"},
3391 {Type: TokenPunctuator, Data: "}"},
3392 {Type: parser.TokenDone, Data: ""},
3393 },
3394 },
3395 { // 219
3396 "${a=b}",
3397 []parser.Token{
3398 {Type: TokenPunctuator, Data: "${"},
3399 {Type: TokenIdentifier, Data: "a"},
3400 {Type: TokenPunctuator, Data: "="},
3401 {Type: TokenWord, Data: "b"},
3402 {Type: TokenPunctuator, Data: "}"},
3403 {Type: parser.TokenDone, Data: ""},
3404 },
3405 },
3406 { // 220
3407 "${a?b}",
3408 []parser.Token{
3409 {Type: TokenPunctuator, Data: "${"},
3410 {Type: TokenIdentifier, Data: "a"},
3411 {Type: TokenPunctuator, Data: "?"},
3412 {Type: TokenWord, Data: "b"},
3413 {Type: TokenPunctuator, Data: "}"},
3414 {Type: parser.TokenDone, Data: ""},
3415 },
3416 },
3417 { // 221
3418 "$(( 0x\"1\" ))",
3419 []parser.Token{
3420 {Type: TokenPunctuator, Data: "$(("},
3421 {Type: TokenWhitespace, Data: " "},
3422 {Type: TokenWord, Data: "0x"},
3423 {Type: TokenString, Data: "\"1\""},
3424 {Type: TokenWhitespace, Data: " "},
3425 {Type: TokenPunctuator, Data: "))"},
3426 {Type: parser.TokenDone, Data: ""},
3427 },
3428 },
3429 { // 222
3430 "$(( 2#, ))",
3431 []parser.Token{
3432 {Type: TokenPunctuator, Data: "$(("},
3433 {Type: TokenWhitespace, Data: " "},
3434 {Type: parser.TokenError, Data: "invalid number"},
3435 },
3436 },
3437 { // 223
3438 "function a time",
3439 []parser.Token{
3440 {Type: TokenKeyword, Data: "function"},
3441 {Type: TokenWhitespace, Data: " "},
3442 {Type: TokenFunctionIdentifier, Data: "a"},
3443 {Type: TokenWhitespace, Data: " "},
3444 {Type: parser.TokenError, Data: "invalid keyword"},
3445 },
3446 },
3447 { // 224
3448 "then",
3449 []parser.Token{
3450 {Type: parser.TokenError, Data: "invalid keyword"},
3451 },
3452 },
3453 { // 225
3454 "in",
3455 []parser.Token{
3456 {Type: parser.TokenError, Data: "invalid keyword"},
3457 },
3458 },
3459 { // 226
3460 "do",
3461 []parser.Token{
3462 {Type: parser.TokenError, Data: "invalid keyword"},
3463 },
3464 },
3465 { // 227
3466 "elif",
3467 []parser.Token{
3468 {Type: parser.TokenError, Data: "invalid keyword"},
3469 },
3470 },
3471 { // 228
3472 "else",
3473 []parser.Token{
3474 {Type: parser.TokenError, Data: "invalid keyword"},
3475 },
3476 },
3477 { // 229
3478 "fi",
3479 []parser.Token{
3480 {Type: parser.TokenError, Data: "invalid keyword"},
3481 },
3482 },
3483 { // 230
3484 "done",
3485 []parser.Token{
3486 {Type: parser.TokenError, Data: "invalid keyword"},
3487 },
3488 },
3489 { // 231
3490 "esac",
3491 []parser.Token{
3492 {Type: parser.TokenError, Data: "invalid keyword"},
3493 },
3494 },
3495 { // 232
3496 "function a coproc",
3497 []parser.Token{
3498 {Type: TokenKeyword, Data: "function"},
3499 {Type: TokenWhitespace, Data: " "},
3500 {Type: TokenFunctionIdentifier, Data: "a"},
3501 {Type: TokenWhitespace, Data: " "},
3502 {Type: parser.TokenError, Data: "invalid keyword"},
3503 },
3504 },
3505 { // 233
3506 "function a function",
3507 []parser.Token{
3508 {Type: TokenKeyword, Data: "function"},
3509 {Type: TokenWhitespace, Data: " "},
3510 {Type: TokenFunctionIdentifier, Data: "a"},
3511 {Type: TokenWhitespace, Data: " "},
3512 {Type: parser.TokenError, Data: "invalid keyword"},
3513 },
3514 },
3515 { // 234
3516 "function a(\n) { echo b; }",
3517 []parser.Token{
3518 {Type: TokenKeyword, Data: "function"},
3519 {Type: TokenWhitespace, Data: " "},
3520 {Type: TokenFunctionIdentifier, Data: "a"},
3521 {Type: TokenPunctuator, Data: "("},
3522 {Type: parser.TokenError, Data: "missing closing paren"},
3523 },
3524 },
3525 { // 235
3526 "function (\n) { echo b; }",
3527 []parser.Token{
3528 {Type: TokenKeyword, Data: "function"},
3529 {Type: TokenWhitespace, Data: " "},
3530 {Type: parser.TokenError, Data: "invalid identifier"},
3531 },
3532 },
3533 { // 236
3534 "function a()",
3535 []parser.Token{
3536 {Type: TokenKeyword, Data: "function"},
3537 {Type: TokenWhitespace, Data: " "},
3538 {Type: TokenFunctionIdentifier, Data: "a"},
3539 {Type: TokenPunctuator, Data: "("},
3540 {Type: TokenPunctuator, Data: ")"},
3541 {Type: parser.TokenError, Data: "unexpected EOF"},
3542 },
3543 },
3544 { // 237
3545 "select %; do b; done",
3546 []parser.Token{
3547 {Type: TokenKeyword, Data: "select"},
3548 {Type: TokenWhitespace, Data: " "},
3549 {Type: parser.TokenError, Data: "invalid identifier"},
3550 },
3551 },
3552 { // 238
3553 "declare a=b",
3554 []parser.Token{
3555 {Type: TokenBuiltin, Data: "declare"},
3556 {Type: TokenWhitespace, Data: " "},
3557 {Type: TokenIdentifierAssign, Data: "a"},
3558 {Type: TokenPunctuator, Data: "="},
3559 {Type: TokenWord, Data: "b"},
3560 {Type: parser.TokenDone, Data: ""},
3561 },
3562 },
3563 { // 239
3564 "local -a a=b c=d",
3565 []parser.Token{
3566 {Type: TokenBuiltin, Data: "local"},
3567 {Type: TokenWhitespace, Data: " "},
3568 {Type: TokenOperator, Data: "-a"},
3569 {Type: TokenWhitespace, Data: " "},
3570 {Type: TokenIdentifierAssign, Data: "a"},
3571 {Type: TokenPunctuator, Data: "="},
3572 {Type: TokenWord, Data: "b"},
3573 {Type: TokenWhitespace, Data: " "},
3574 {Type: TokenIdentifierAssign, Data: "c"},
3575 {Type: TokenPunctuator, Data: "="},
3576 {Type: TokenWord, Data: "d"},
3577 {Type: parser.TokenDone, Data: ""},
3578 },
3579 },
3580 { // 240
3581 "typeset -aFx a=b",
3582 []parser.Token{
3583 {Type: TokenBuiltin, Data: "typeset"},
3584 {Type: TokenWhitespace, Data: " "},
3585 {Type: TokenOperator, Data: "-aFx"},
3586 {Type: TokenWhitespace, Data: " "},
3587 {Type: TokenIdentifierAssign, Data: "a"},
3588 {Type: TokenPunctuator, Data: "="},
3589 {Type: TokenWord, Data: "b"},
3590 {Type: parser.TokenDone, Data: ""},
3591 },
3592 },
3593 { // 241
3594 "readonly -A -p -f a=b",
3595 []parser.Token{
3596 {Type: TokenBuiltin, Data: "readonly"},
3597 {Type: TokenWhitespace, Data: " "},
3598 {Type: TokenOperator, Data: "-A"},
3599 {Type: TokenWhitespace, Data: " "},
3600 {Type: TokenOperator, Data: "-p"},
3601 {Type: TokenWhitespace, Data: " "},
3602 {Type: TokenOperator, Data: "-f"},
3603 {Type: TokenWhitespace, Data: " "},
3604 {Type: TokenIdentifierAssign, Data: "a"},
3605 {Type: TokenPunctuator, Data: "="},
3606 {Type: TokenWord, Data: "b"},
3607 {Type: parser.TokenDone, Data: ""},
3608 },
3609 },
3610 { // 242
3611 "export -n -1 a",
3612 []parser.Token{
3613 {Type: TokenBuiltin, Data: "export"},
3614 {Type: TokenWhitespace, Data: " "},
3615 {Type: TokenOperator, Data: "-n"},
3616 {Type: TokenWhitespace, Data: " "},
3617 {Type: parser.TokenError, Data: "invalid character"},
3618 },
3619 },
3620 { // 243
3621 "let a=1",
3622 []parser.Token{
3623 {Type: TokenBuiltin, Data: "let"},
3624 {Type: TokenWhitespace, Data: " "},
3625 {Type: TokenLetIdentifierAssign, Data: "a"},
3626 {Type: TokenPunctuator, Data: "="},
3627 {Type: TokenNumberLiteral, Data: "1"},
3628 {Type: parser.TokenDone, Data: ""},
3629 },
3630 },
3631 { // 244
3632 "let a=(1)",
3633 []parser.Token{
3634 {Type: TokenBuiltin, Data: "let"},
3635 {Type: TokenWhitespace, Data: " "},
3636 {Type: TokenLetIdentifierAssign, Data: "a"},
3637 {Type: TokenPunctuator, Data: "="},
3638 {Type: TokenPunctuator, Data: "("},
3639 {Type: TokenNumberLiteral, Data: "1"},
3640 {Type: TokenPunctuator, Data: ")"},
3641 {Type: parser.TokenDone, Data: ""},
3642 },
3643 },
3644 { // 245
3645 "let a=( 1 );",
3646 []parser.Token{
3647 {Type: TokenBuiltin, Data: "let"},
3648 {Type: TokenWhitespace, Data: " "},
3649 {Type: TokenLetIdentifierAssign, Data: "a"},
3650 {Type: TokenPunctuator, Data: "="},
3651 {Type: TokenPunctuator, Data: "("},
3652 {Type: TokenWhitespace, Data: " "},
3653 {Type: TokenNumberLiteral, Data: "1"},
3654 {Type: TokenWhitespace, Data: " "},
3655 {Type: TokenPunctuator, Data: ")"},
3656 {Type: TokenPunctuator, Data: ";"},
3657 {Type: parser.TokenDone, Data: ""},
3658 },
3659 },
3660 { // 246
3661 "let a=1+2 b=2*3",
3662 []parser.Token{
3663 {Type: TokenBuiltin, Data: "let"},
3664 {Type: TokenWhitespace, Data: " "},
3665 {Type: TokenLetIdentifierAssign, Data: "a"},
3666 {Type: TokenPunctuator, Data: "="},
3667 {Type: TokenNumberLiteral, Data: "1"},
3668 {Type: TokenPunctuator, Data: "+"},
3669 {Type: TokenNumberLiteral, Data: "2"},
3670 {Type: TokenWhitespace, Data: " "},
3671 {Type: TokenLetIdentifierAssign, Data: "b"},
3672 {Type: TokenPunctuator, Data: "="},
3673 {Type: TokenNumberLiteral, Data: "2"},
3674 {Type: TokenPunctuator, Data: "*"},
3675 {Type: TokenNumberLiteral, Data: "3"},
3676 {Type: parser.TokenDone, Data: ""},
3677 },
3678 },
3679 { // 247
3680 "let a=b?c:d",
3681 []parser.Token{
3682 {Type: TokenBuiltin, Data: "let"},
3683 {Type: TokenWhitespace, Data: " "},
3684 {Type: TokenLetIdentifierAssign, Data: "a"},
3685 {Type: TokenPunctuator, Data: "="},
3686 {Type: TokenWord, Data: "b"},
3687 {Type: TokenPunctuator, Data: "?"},
3688 {Type: TokenWord, Data: "c"},
3689 {Type: TokenPunctuator, Data: ":"},
3690 {Type: TokenWord, Data: "d"},
3691 {Type: parser.TokenDone, Data: ""},
3692 },
3693 },
3694 { // 248
3695 "let a=b ? c : d",
3696 []parser.Token{
3697 {Type: TokenBuiltin, Data: "let"},
3698 {Type: TokenWhitespace, Data: " "},
3699 {Type: TokenLetIdentifierAssign, Data: "a"},
3700 {Type: TokenPunctuator, Data: "="},
3701 {Type: TokenWord, Data: "b"},
3702 {Type: TokenWhitespace, Data: " "},
3703 {Type: TokenWord, Data: "?"},
3704 {Type: TokenWhitespace, Data: " "},
3705 {Type: TokenWord, Data: "c"},
3706 {Type: TokenWhitespace, Data: " "},
3707 {Type: TokenWord, Data: ":"},
3708 {Type: TokenWhitespace, Data: " "},
3709 {Type: TokenWord, Data: "d"},
3710 {Type: parser.TokenDone, Data: ""},
3711 },
3712 },
3713 { // 249
3714 "let a=( b ? c : d )",
3715 []parser.Token{
3716 {Type: TokenBuiltin, Data: "let"},
3717 {Type: TokenWhitespace, Data: " "},
3718 {Type: TokenLetIdentifierAssign, Data: "a"},
3719 {Type: TokenPunctuator, Data: "="},
3720 {Type: TokenPunctuator, Data: "("},
3721 {Type: TokenWhitespace, Data: " "},
3722 {Type: TokenWord, Data: "b"},
3723 {Type: TokenWhitespace, Data: " "},
3724 {Type: TokenPunctuator, Data: "?"},
3725 {Type: TokenWhitespace, Data: " "},
3726 {Type: TokenWord, Data: "c"},
3727 {Type: TokenWhitespace, Data: " "},
3728 {Type: TokenPunctuator, Data: ":"},
3729 {Type: TokenWhitespace, Data: " "},
3730 {Type: TokenWord, Data: "d"},
3731 {Type: TokenWhitespace, Data: " "},
3732 {Type: TokenPunctuator, Data: ")"},
3733 {Type: parser.TokenDone, Data: ""},
3734 },
3735 },
3736 { // 250
3737 "let a={b",
3738 []parser.Token{
3739 {Type: TokenBuiltin, Data: "let"},
3740 {Type: TokenWhitespace, Data: " "},
3741 {Type: TokenLetIdentifierAssign, Data: "a"},
3742 {Type: TokenPunctuator, Data: "="},
3743 {Type: parser.TokenError, Data: "invalid character"},
3744 },
3745 },
3746 { // 251
3747 "let a={b..c}",
3748 []parser.Token{
3749 {Type: TokenBuiltin, Data: "let"},
3750 {Type: TokenWhitespace, Data: " "},
3751 {Type: TokenLetIdentifierAssign, Data: "a"},
3752 {Type: TokenPunctuator, Data: "="},
3753 {Type: TokenBraceExpansion, Data: "{b..c}"},
3754 {Type: parser.TokenDone, Data: ""},
3755 },
3756 },
3757 { // 252
3758 "let a=(b + c{d..e})",
3759 []parser.Token{
3760 {Type: TokenBuiltin, Data: "let"},
3761 {Type: TokenWhitespace, Data: " "},
3762 {Type: TokenLetIdentifierAssign, Data: "a"},
3763 {Type: TokenPunctuator, Data: "="},
3764 {Type: TokenPunctuator, Data: "("},
3765 {Type: TokenWord, Data: "b"},
3766 {Type: TokenWhitespace, Data: " "},
3767 {Type: TokenPunctuator, Data: "+"},
3768 {Type: TokenWhitespace, Data: " "},
3769 {Type: TokenWord, Data: "c"},
3770 {Type: TokenBraceExpansion, Data: "{d..e}"},
3771 {Type: TokenPunctuator, Data: ")"},
3772 {Type: parser.TokenDone, Data: ""},
3773 },
3774 },
3775 { // 253
3776 "a=()",
3777 []parser.Token{
3778 {Type: TokenIdentifierAssign, Data: "a"},
3779 {Type: TokenPunctuator, Data: "="},
3780 {Type: TokenPunctuator, Data: "("},
3781 {Type: TokenPunctuator, Data: ")"},
3782 {Type: parser.TokenDone, Data: ""},
3783 },
3784 },
3785 { // 254
3786 "a=(b c)",
3787 []parser.Token{
3788 {Type: TokenIdentifierAssign, Data: "a"},
3789 {Type: TokenPunctuator, Data: "="},
3790 {Type: TokenPunctuator, Data: "("},
3791 {Type: TokenWord, Data: "b"},
3792 {Type: TokenWhitespace, Data: " "},
3793 {Type: TokenWord, Data: "c"},
3794 {Type: TokenPunctuator, Data: ")"},
3795 {Type: parser.TokenDone, Data: ""},
3796 },
3797 },
3798 { // 255
3799 "a=([b]=c [d]=e)",
3800 []parser.Token{
3801 {Type: TokenIdentifierAssign, Data: "a"},
3802 {Type: TokenPunctuator, Data: "="},
3803 {Type: TokenPunctuator, Data: "("},
3804 {Type: TokenWord, Data: "[b]=c"},
3805 {Type: TokenWhitespace, Data: " "},
3806 {Type: TokenWord, Data: "[d]=e"},
3807 {Type: TokenPunctuator, Data: ")"},
3808 {Type: parser.TokenDone, Data: ""},
3809 },
3810 },
3811 { // 256
3812 "a[ b]=1 c",
3813 []parser.Token{
3814 {Type: TokenIdentifierAssign, Data: "a"},
3815 {Type: TokenPunctuator, Data: "["},
3816 {Type: TokenWhitespace, Data: " "},
3817 {Type: TokenWord, Data: "b"},
3818 {Type: TokenPunctuator, Data: "]"},
3819 {Type: TokenPunctuator, Data: "="},
3820 {Type: TokenWord, Data: "1"},
3821 {Type: TokenWhitespace, Data: " "},
3822 {Type: TokenWord, Data: "c"},
3823 {Type: parser.TokenDone, Data: ""},
3824 },
3825 },
3826 { // 257
3827 "a b[ c]=1",
3828 []parser.Token{
3829 {Type: TokenWord, Data: "a"},
3830 {Type: TokenWhitespace, Data: " "},
3831 {Type: TokenWord, Data: "b["},
3832 {Type: TokenWhitespace, Data: " "},
3833 {Type: TokenWord, Data: "c]=1"},
3834 {Type: parser.TokenDone, Data: ""},
3835 },
3836 },
3837 { // 258
3838 "( #comment\n)",
3839 []parser.Token{
3840 {Type: TokenPunctuator, Data: "("},
3841 {Type: TokenWhitespace, Data: " "},
3842 {Type: TokenComment, Data: "#comment"},
3843 {Type: TokenLineTerminator, Data: "\n"},
3844 {Type: TokenPunctuator, Data: ")"},
3845 {Type: parser.TokenDone, Data: ""},
3846 },
3847 },
3848 { // 259
3849 "{ #comment\n}",
3850 []parser.Token{
3851 {Type: TokenPunctuator, Data: "{"},
3852 {Type: TokenWhitespace, Data: " "},
3853 {Type: TokenComment, Data: "#comment"},
3854 {Type: TokenLineTerminator, Data: "\n"},
3855 {Type: TokenPunctuator, Data: "}"},
3856 {Type: parser.TokenDone, Data: ""},
3857 },
3858 },
3859 { // 260
3860 "(( #comment\n))",
3861 []parser.Token{
3862 {Type: TokenPunctuator, Data: "(("},
3863 {Type: TokenWhitespace, Data: " "},
3864 {Type: parser.TokenError, Data: "invalid character"},
3865 },
3866 },
3867 { // 261
3868 "a[",
3869 []parser.Token{
3870 {Type: TokenIdentifierAssign, Data: "a"},
3871 {Type: TokenPunctuator, Data: "["},
3872 {Type: parser.TokenError, Data: "unexpected EOF"},
3873 },
3874 },
3875 { // 262
3876 "a[b]c[",
3877 []parser.Token{
3878 {Type: TokenWord, Data: "a"},
3879 {Type: TokenPunctuator, Data: "["},
3880 {Type: TokenWord, Data: "b"},
3881 {Type: TokenPunctuator, Data: "]"},
3882 {Type: TokenWord, Data: "c["},
3883 {Type: parser.TokenDone, Data: ""},
3884 },
3885 },
3886 { // 263
3887 "a[b]=c d[",
3888 []parser.Token{
3889 {Type: TokenIdentifierAssign, Data: "a"},
3890 {Type: TokenPunctuator, Data: "["},
3891 {Type: TokenWord, Data: "b"},
3892 {Type: TokenPunctuator, Data: "]"},
3893 {Type: TokenPunctuator, Data: "="},
3894 {Type: TokenWord, Data: "c"},
3895 {Type: TokenWhitespace, Data: " "},
3896 {Type: TokenIdentifierAssign, Data: "d"},
3897 {Type: TokenPunctuator, Data: "["},
3898 {Type: parser.TokenError, Data: "unexpected EOF"},
3899 },
3900 },
3901 { // 264
3902 "a[$b+1]=c",
3903 []parser.Token{
3904 {Type: TokenIdentifierAssign, Data: "a"},
3905 {Type: TokenPunctuator, Data: "["},
3906 {Type: TokenIdentifier, Data: "$b"},
3907 {Type: TokenPunctuator, Data: "+"},
3908 {Type: TokenNumberLiteral, Data: "1"},
3909 {Type: TokenPunctuator, Data: "]"},
3910 {Type: TokenPunctuator, Data: "="},
3911 {Type: TokenWord, Data: "c"},
3912 {Type: parser.TokenDone, Data: ""},
3913 },
3914 },
3915 { // 265
3916 "a[b]=( 1 )",
3917 []parser.Token{
3918 {Type: TokenIdentifierAssign, Data: "a"},
3919 {Type: TokenPunctuator, Data: "["},
3920 {Type: TokenWord, Data: "b"},
3921 {Type: TokenPunctuator, Data: "]"},
3922 {Type: TokenPunctuator, Data: "="},
3923 {Type: parser.TokenError, Data: "invalid character"},
3924 },
3925 },
3926 { // 266
3927 "a[ b ]",
3928 []parser.Token{
3929 {Type: TokenWord, Data: "a"},
3930 {Type: TokenPunctuator, Data: "["},
3931 {Type: TokenWord, Data: " "},
3932 {Type: TokenWord, Data: "b"},
3933 {Type: TokenWord, Data: " "},
3934 {Type: TokenPunctuator, Data: "]"},
3935 {Type: parser.TokenDone, Data: ""},
3936 },
3937 },
3938 { // 267
3939 "a[\nb\n]",
3940 []parser.Token{
3941 {Type: TokenWord, Data: "a"},
3942 {Type: TokenPunctuator, Data: "["},
3943 {Type: TokenWord, Data: "\n"},
3944 {Type: TokenWord, Data: "b"},
3945 {Type: TokenWord, Data: "\n"},
3946 {Type: TokenPunctuator, Data: "]"},
3947 {Type: parser.TokenDone, Data: ""},
3948 },
3949 },
3950 { // 268
3951 "a[ b ][ c ]",
3952 []parser.Token{
3953 {Type: TokenWord, Data: "a"},
3954 {Type: TokenPunctuator, Data: "["},
3955 {Type: TokenWord, Data: " "},
3956 {Type: TokenWord, Data: "b"},
3957 {Type: TokenWord, Data: " "},
3958 {Type: TokenPunctuator, Data: "]"},
3959 {Type: TokenWord, Data: "["},
3960 {Type: TokenWhitespace, Data: " "},
3961 {Type: TokenWord, Data: "c"},
3962 {Type: TokenWhitespace, Data: " "},
3963 {Type: TokenWord, Data: "]"},
3964 {Type: parser.TokenDone, Data: ""},
3965 },
3966 },
3967 { // 269
3968 "a b[",
3969 []parser.Token{
3970 {Type: TokenWord, Data: "a"},
3971 {Type: TokenWhitespace, Data: " "},
3972 {Type: TokenWord, Data: "b["},
3973 {Type: parser.TokenDone, Data: ""},
3974 },
3975 },
3976 { // 270
3977 "a[b]",
3978 []parser.Token{
3979 {Type: TokenWord, Data: "a"},
3980 {Type: TokenPunctuator, Data: "["},
3981 {Type: TokenWord, Data: "b"},
3982 {Type: TokenPunctuator, Data: "]"},
3983 {Type: parser.TokenDone, Data: ""},
3984 },
3985 },
3986 { // 271
3987 "a[b;]",
3988 []parser.Token{
3989 {Type: TokenWord, Data: "a"},
3990 {Type: TokenPunctuator, Data: "["},
3991 {Type: TokenWord, Data: "b;"},
3992 {Type: TokenPunctuator, Data: "]"},
3993 {Type: parser.TokenDone, Data: ""},
3994 },
3995 },
3996 { // 272
3997 "a[b;]=",
3998 []parser.Token{
3999 {Type: TokenIdentifierAssign, Data: "a"},
4000 {Type: TokenPunctuator, Data: "["},
4001 {Type: TokenWord, Data: "b"},
4002 {Type: parser.TokenError, Data: "invalid character"},
4003 },
4004 },
4005 { // 273
4006 "a[b{]",
4007 []parser.Token{
4008 {Type: TokenWord, Data: "a"},
4009 {Type: TokenPunctuator, Data: "["},
4010 {Type: TokenWord, Data: "b{"},
4011 {Type: TokenPunctuator, Data: "]"},
4012 {Type: parser.TokenDone, Data: ""},
4013 },
4014 },
4015 { // 274
4016 "a[b{]=",
4017 []parser.Token{
4018 {Type: TokenIdentifierAssign, Data: "a"},
4019 {Type: TokenPunctuator, Data: "["},
4020 {Type: TokenWord, Data: "b"},
4021 {Type: parser.TokenError, Data: "invalid character"},
4022 },
4023 },
4024 { // 275
4025 "a[b}]",
4026 []parser.Token{
4027 {Type: TokenWord, Data: "a"},
4028 {Type: TokenPunctuator, Data: "["},
4029 {Type: TokenWord, Data: "b"},
4030 {Type: TokenPunctuator, Data: "}"},
4031 {Type: TokenPunctuator, Data: "]"},
4032 {Type: parser.TokenDone, Data: ""},
4033 },
4034 },
4035 { // 276
4036 "a[b}]=",
4037 []parser.Token{
4038 {Type: TokenIdentifierAssign, Data: "a"},
4039 {Type: TokenPunctuator, Data: "["},
4040 {Type: TokenWord, Data: "b"},
4041 {Type: parser.TokenError, Data: "invalid character"},
4042 },
4043 },
4044 { // 277
4045 "a[b#]",
4046 []parser.Token{
4047 {Type: TokenWord, Data: "a"},
4048 {Type: TokenPunctuator, Data: "["},
4049 {Type: TokenWord, Data: "b#"},
4050 {Type: TokenPunctuator, Data: "]"},
4051 {Type: parser.TokenDone, Data: ""},
4052 },
4053 },
4054 { // 278
4055 "a[b #]",
4056 []parser.Token{
4057 {Type: TokenWord, Data: "a"},
4058 {Type: TokenPunctuator, Data: "["},
4059 {Type: TokenWord, Data: "b"},
4060 {Type: TokenWord, Data: " "},
4061 {Type: TokenWord, Data: "#"},
4062 {Type: TokenPunctuator, Data: "]"},
4063 {Type: parser.TokenDone, Data: ""},
4064 },
4065 },
4066 { // 279
4067 "a[b #]=",
4068 []parser.Token{
4069 {Type: TokenIdentifierAssign, Data: "a"},
4070 {Type: TokenPunctuator, Data: "["},
4071 {Type: TokenWord, Data: "b"},
4072 {Type: TokenWhitespace, Data: " "},
4073 {Type: parser.TokenError, Data: "invalid character"},
4074 },
4075 },
4076 { // 280
4077 "a[b #]+",
4078 []parser.Token{
4079 {Type: TokenWord, Data: "a"},
4080 {Type: TokenPunctuator, Data: "["},
4081 {Type: TokenWord, Data: "b"},
4082 {Type: TokenWord, Data: " "},
4083 {Type: TokenWord, Data: "#"},
4084 {Type: TokenPunctuator, Data: "]"},
4085 {Type: TokenWord, Data: "+"},
4086 {Type: parser.TokenDone, Data: ""},
4087 },
4088 },
4089 { // 281
4090 "a[b #]+=",
4091 []parser.Token{
4092 {Type: TokenIdentifierAssign, Data: "a"},
4093 {Type: TokenPunctuator, Data: "["},
4094 {Type: TokenWord, Data: "b"},
4095 {Type: TokenWhitespace, Data: " "},
4096 {Type: parser.TokenError, Data: "invalid character"},
4097 },
4098 },
4099 { // 282
4100 "a b[",
4101 []parser.Token{
4102 {Type: TokenWord, Data: "a"},
4103 {Type: TokenWhitespace, Data: " "},
4104 {Type: TokenWord, Data: "b["},
4105 {Type: parser.TokenDone, Data: ""},
4106 },
4107 },
4108 { // 283
4109 "a b[c]",
4110 []parser.Token{
4111 {Type: TokenWord, Data: "a"},
4112 {Type: TokenWhitespace, Data: " "},
4113 {Type: TokenWord, Data: "b[c]"},
4114 {Type: parser.TokenDone, Data: ""},
4115 },
4116 },
4117 { // 284
4118 "a b[c ]",
4119 []parser.Token{
4120 {Type: TokenWord, Data: "a"},
4121 {Type: TokenWhitespace, Data: " "},
4122 {Type: TokenWord, Data: "b[c"},
4123 {Type: TokenWhitespace, Data: " "},
4124 {Type: TokenWord, Data: "]"},
4125 {Type: parser.TokenDone, Data: ""},
4126 },
4127 },
4128 { // 285
4129 "a b[c]=1",
4130 []parser.Token{
4131 {Type: TokenWord, Data: "a"},
4132 {Type: TokenWhitespace, Data: " "},
4133 {Type: TokenIdentifierAssign, Data: "b"},
4134 {Type: TokenPunctuator, Data: "["},
4135 {Type: TokenWord, Data: "c"},
4136 {Type: TokenPunctuator, Data: "]"},
4137 {Type: TokenPunctuator, Data: "="},
4138 {Type: TokenWord, Data: "1"},
4139 {Type: parser.TokenDone, Data: ""},
4140 },
4141 },
4142 { // 286
4143 "a=(( 1 ))",
4144 []parser.Token{
4145 {Type: TokenIdentifierAssign, Data: "a"},
4146 {Type: TokenPunctuator, Data: "="},
4147 {Type: parser.TokenError, Data: "invalid character"},
4148 },
4149 },
4150 { // 287
4151 "a=",
4152 []parser.Token{
4153 {Type: TokenIdentifierAssign, Data: "a"},
4154 {Type: TokenPunctuator, Data: "="},
4155 {Type: parser.TokenDone, Data: ""},
4156 },
4157 },
4158 { // 288
4159 "a=;",
4160 []parser.Token{
4161 {Type: TokenIdentifierAssign, Data: "a"},
4162 {Type: TokenPunctuator, Data: "="},
4163 {Type: TokenPunctuator, Data: ";"},
4164 {Type: parser.TokenDone, Data: ""},
4165 },
4166 },
4167 { // 289
4168 "a= b",
4169 []parser.Token{
4170 {Type: TokenIdentifierAssign, Data: "a"},
4171 {Type: TokenPunctuator, Data: "="},
4172 {Type: TokenWhitespace, Data: " "},
4173 {Type: TokenWord, Data: "b"},
4174 {Type: parser.TokenDone, Data: ""},
4175 },
4176 },
4177 { // 290
4178 "a[$(b)]=",
4179 []parser.Token{
4180 {Type: TokenIdentifierAssign, Data: "a"},
4181 {Type: TokenPunctuator, Data: "["},
4182 {Type: TokenPunctuator, Data: "$("},
4183 {Type: TokenWord, Data: "b"},
4184 {Type: TokenPunctuator, Data: ")"},
4185 {Type: TokenPunctuator, Data: "]"},
4186 {Type: TokenPunctuator, Data: "="},
4187 {Type: parser.TokenDone, Data: ""},
4188 },
4189 },
4190 { // 291
4191 "{ function a() { b; } }",
4192 []parser.Token{
4193 {Type: TokenPunctuator, Data: "{"},
4194 {Type: TokenWhitespace, Data: " "},
4195 {Type: TokenKeyword, Data: "function"},
4196 {Type: TokenWhitespace, Data: " "},
4197 {Type: TokenFunctionIdentifier, Data: "a"},
4198 {Type: TokenPunctuator, Data: "("},
4199 {Type: TokenPunctuator, Data: ")"},
4200 {Type: TokenWhitespace, Data: " "},
4201 {Type: TokenPunctuator, Data: "{"},
4202 {Type: TokenWhitespace, Data: " "},
4203 {Type: TokenWord, Data: "b"},
4204 {Type: TokenPunctuator, Data: ";"},
4205 {Type: TokenWhitespace, Data: " "},
4206 {Type: TokenPunctuator, Data: "}"},
4207 {Type: TokenWhitespace, Data: " "},
4208 {Type: TokenPunctuator, Data: "}"},
4209 {Type: parser.TokenDone, Data: ""},
4210 },
4211 },
4212 { // 292
4213 "[ \"$a\" = \"b\" ]",
4214 []parser.Token{
4215 {Type: TokenWord, Data: "["},
4216 {Type: TokenWhitespace, Data: " "},
4217 {Type: TokenStringStart, Data: "\""},
4218 {Type: TokenIdentifier, Data: "$a"},
4219 {Type: TokenStringEnd, Data: "\""},
4220 {Type: TokenWhitespace, Data: " "},
4221 {Type: TokenWord, Data: "="},
4222 {Type: TokenWhitespace, Data: " "},
4223 {Type: TokenString, Data: "\"b\""},
4224 {Type: TokenWhitespace, Data: " "},
4225 {Type: TokenWord, Data: "]"},
4226 {Type: parser.TokenDone, Data: ""},
4227 },
4228 },
4229 } {
4230 p := parser.NewStringTokeniser(test.Input)
4231
4232 SetTokeniser(&p)
4233
4234 for m, tkn := range test.Output {
4235 if tk, _ := p.GetToken(); tk.Type != tkn.Type {
4236 if tk.Type == parser.TokenError {
4237 t.Errorf("test %d.%d: unexpected error: %s", n+1, m+1, tk.Data)
4238 } else {
4239 t.Errorf("test %d.%d: Incorrect type, expecting %d, got %d", n+1, m+1, tkn.Type, tk.Type)
4240 }
4241
4242 break
4243 } else if tk.Data != tkn.Data {
4244 t.Errorf("test %d.%d: Incorrect data, expecting %q, got %q", n+1, m+1, tkn.Data, tk.Data)
4245
4246 break
4247 }
4248 }
4249 }
4250 }
4251