// bash - tokeniser_test.go
1 package bash
2
3 import (
4 "testing"
5
6 "vimagination.zapto.org/parser"
7 )
8
9 func TestTokeniser(t *testing.T) {
10 for n, test := range [...]struct {
11 Input string
12 Output []parser.Token
13 }{
14 { // 1
15 "",
16 []parser.Token{
17 {Type: parser.TokenDone, Data: ""},
18 },
19 },
20 { // 2
21 " ",
22 []parser.Token{
23 {Type: TokenWhitespace, Data: " "},
24 {Type: parser.TokenDone, Data: ""},
25 },
26 },
27 { // 3
28 " \t\\\n",
29 []parser.Token{
30 {Type: TokenWhitespace, Data: " \t\\\n"},
31 {Type: parser.TokenDone, Data: ""},
32 },
33 },
34 { // 4
35 "\\\n \t",
36 []parser.Token{
37 {Type: TokenWhitespace, Data: "\\\n \t"},
38 {Type: parser.TokenDone, Data: ""},
39 },
40 },
41 { // 5
42 " \n\n \n",
43 []parser.Token{
44 {Type: TokenWhitespace, Data: " "},
45 {Type: TokenLineTerminator, Data: "\n\n"},
46 {Type: TokenWhitespace, Data: " "},
47 {Type: TokenLineTerminator, Data: "\n"},
48 {Type: parser.TokenDone, Data: ""},
49 },
50 },
51 { // 6
52 "#A comment\n# B comment",
53 []parser.Token{
54 {Type: TokenComment, Data: "#A comment"},
55 {Type: TokenLineTerminator, Data: "\n"},
56 {Type: TokenComment, Data: "# B comment"},
57 {Type: parser.TokenDone, Data: ""},
58 },
59 },
60 { // 7
61 "$ident $name a\\nbc=a $0 $12 a$b a${b}c $$ $! $? $",
62 []parser.Token{
63 {Type: TokenIdentifier, Data: "$ident"},
64 {Type: TokenWhitespace, Data: " "},
65 {Type: TokenIdentifier, Data: "$name"},
66 {Type: TokenWhitespace, Data: " "},
67 {Type: TokenWord, Data: "a\\nbc=a"},
68 {Type: TokenWhitespace, Data: " "},
69 {Type: TokenIdentifier, Data: "$0"},
70 {Type: TokenWhitespace, Data: " "},
71 {Type: TokenIdentifier, Data: "$1"},
72 {Type: TokenWord, Data: "2"},
73 {Type: TokenWhitespace, Data: " "},
74 {Type: TokenWord, Data: "a"},
75 {Type: TokenIdentifier, Data: "$b"},
76 {Type: TokenWhitespace, Data: " "},
77 {Type: TokenWord, Data: "a"},
78 {Type: TokenPunctuator, Data: "${"},
79 {Type: TokenIdentifier, Data: "b"},
80 {Type: TokenPunctuator, Data: "}"},
81 {Type: TokenWord, Data: "c"},
82 {Type: TokenWhitespace, Data: " "},
83 {Type: TokenIdentifier, Data: "$$"},
84 {Type: TokenWhitespace, Data: " "},
85 {Type: TokenIdentifier, Data: "$!"},
86 {Type: TokenWhitespace, Data: " "},
87 {Type: TokenIdentifier, Data: "$?"},
88 {Type: TokenWhitespace, Data: " "},
89 {Type: TokenWord, Data: "$"},
90 {Type: parser.TokenDone, Data: ""},
91 },
92 },
93 { // 8
94 "abc=a def[0]=b ghi[$i]=c jkl+=d",
95 []parser.Token{
96 {Type: TokenIdentifierAssign, Data: "abc"},
97 {Type: TokenAssignment, Data: "="},
98 {Type: TokenWord, Data: "a"},
99 {Type: TokenWhitespace, Data: " "},
100 {Type: TokenIdentifierAssign, Data: "def"},
101 {Type: TokenPunctuator, Data: "["},
102 {Type: TokenNumberLiteral, Data: "0"},
103 {Type: TokenPunctuator, Data: "]"},
104 {Type: TokenAssignment, Data: "="},
105 {Type: TokenWord, Data: "b"},
106 {Type: TokenWhitespace, Data: " "},
107 {Type: TokenIdentifierAssign, Data: "ghi"},
108 {Type: TokenPunctuator, Data: "["},
109 {Type: TokenIdentifier, Data: "$i"},
110 {Type: TokenPunctuator, Data: "]"},
111 {Type: TokenAssignment, Data: "="},
112 {Type: TokenWord, Data: "c"},
113 {Type: TokenWhitespace, Data: " "},
114 {Type: TokenIdentifierAssign, Data: "jkl"},
115 {Type: TokenAssignment, Data: "+="},
116 {Type: TokenWord, Data: "d"},
117 {Type: parser.TokenDone, Data: ""},
118 },
119 },
120 { // 9
121 "ident ${name} ab\\nc=a ${6} a$ ",
122 []parser.Token{
123 {Type: TokenWord, Data: "ident"},
124 {Type: TokenWhitespace, Data: " "},
125 {Type: TokenPunctuator, Data: "${"},
126 {Type: TokenIdentifier, Data: "name"},
127 {Type: TokenPunctuator, Data: "}"},
128 {Type: TokenWhitespace, Data: " "},
129 {Type: TokenWord, Data: "ab\\nc=a"},
130 {Type: TokenWhitespace, Data: " "},
131 {Type: TokenPunctuator, Data: "${"},
132 {Type: TokenNumberLiteral, Data: "6"},
133 {Type: TokenPunctuator, Data: "}"},
134 {Type: TokenWhitespace, Data: " "},
135 {Type: TokenWord, Data: "a$"},
136 {Type: TokenWhitespace, Data: " "},
137 {Type: parser.TokenDone, Data: ""},
138 },
139 },
140 { // 10
141 "$(( 0 1 29 0xff 0xDeAdBeEf 0755 2#5 ))",
142 []parser.Token{
143 {Type: TokenPunctuator, Data: "$(("},
144 {Type: TokenWhitespace, Data: " "},
145 {Type: TokenNumberLiteral, Data: "0"},
146 {Type: TokenWhitespace, Data: " "},
147 {Type: TokenNumberLiteral, Data: "1"},
148 {Type: TokenWhitespace, Data: " "},
149 {Type: TokenNumberLiteral, Data: "29"},
150 {Type: TokenWhitespace, Data: " "},
151 {Type: TokenNumberLiteral, Data: "0xff"},
152 {Type: TokenWhitespace, Data: " "},
153 {Type: TokenNumberLiteral, Data: "0xDeAdBeEf"},
154 {Type: TokenWhitespace, Data: " "},
155 {Type: TokenNumberLiteral, Data: "0755"},
156 {Type: TokenWhitespace, Data: " "},
157 {Type: TokenNumberLiteral, Data: "2#5"},
158 {Type: TokenWhitespace, Data: " "},
159 {Type: TokenPunctuator, Data: "))"},
160 {Type: parser.TokenDone, Data: ""},
161 },
162 },
163 { // 11
164 "\"abc\" \"de\\nf\" \"stuff`command`more stuff\" \"text $ident $another end\" \"text $(command) end - text ${ident} end\" \"with\nnewline\" 'with\nnewline' $\"a string\" $'a \\'string'",
165 []parser.Token{
166 {Type: TokenString, Data: "\"abc\""},
167 {Type: TokenWhitespace, Data: " "},
168 {Type: TokenString, Data: "\"de\\nf\""},
169 {Type: TokenWhitespace, Data: " "},
170 {Type: TokenStringStart, Data: "\"stuff"},
171 {Type: TokenOpenBacktick, Data: "`"},
172 {Type: TokenWord, Data: "command"},
173 {Type: TokenCloseBacktick, Data: "`"},
174 {Type: TokenStringEnd, Data: "more stuff\""},
175 {Type: TokenWhitespace, Data: " "},
176 {Type: TokenStringStart, Data: "\"text "},
177 {Type: TokenIdentifier, Data: "$ident"},
178 {Type: TokenStringMid, Data: " "},
179 {Type: TokenIdentifier, Data: "$another"},
180 {Type: TokenStringEnd, Data: " end\""},
181 {Type: TokenWhitespace, Data: " "},
182 {Type: TokenStringStart, Data: "\"text "},
183 {Type: TokenPunctuator, Data: "$("},
184 {Type: TokenWord, Data: "command"},
185 {Type: TokenPunctuator, Data: ")"},
186 {Type: TokenStringMid, Data: " end - text "},
187 {Type: TokenPunctuator, Data: "${"},
188 {Type: TokenIdentifier, Data: "ident"},
189 {Type: TokenPunctuator, Data: "}"},
190 {Type: TokenStringEnd, Data: " end\""},
191 {Type: TokenWhitespace, Data: " "},
192 {Type: TokenString, Data: "\"with\nnewline\""},
193 {Type: TokenWhitespace, Data: " "},
194 {Type: TokenString, Data: "'with\nnewline'"},
195 {Type: TokenWhitespace, Data: " "},
196 {Type: TokenString, Data: "$\"a string\""},
197 {Type: TokenWhitespace, Data: " "},
198 {Type: TokenString, Data: "$'a \\'string'"},
199 {Type: parser.TokenDone, Data: ""},
200 },
201 },
202 { // 12
203 "< <<< <& <> > >> >& &>> >| | |& || & && {} + = += `` , $() $(()) (())",
204 []parser.Token{
205 {Type: TokenPunctuator, Data: "<"},
206 {Type: TokenWhitespace, Data: " "},
207 {Type: TokenPunctuator, Data: "<<<"},
208 {Type: TokenWhitespace, Data: " "},
209 {Type: TokenPunctuator, Data: "<&"},
210 {Type: TokenWhitespace, Data: " "},
211 {Type: TokenPunctuator, Data: "<>"},
212 {Type: TokenWhitespace, Data: " "},
213 {Type: TokenPunctuator, Data: ">"},
214 {Type: TokenWhitespace, Data: " "},
215 {Type: TokenPunctuator, Data: ">>"},
216 {Type: TokenWhitespace, Data: " "},
217 {Type: TokenPunctuator, Data: ">&"},
218 {Type: TokenWhitespace, Data: " "},
219 {Type: TokenPunctuator, Data: "&>>"},
220 {Type: TokenWhitespace, Data: " "},
221 {Type: TokenPunctuator, Data: ">|"},
222 {Type: TokenWhitespace, Data: " "},
223 {Type: TokenPunctuator, Data: "|"},
224 {Type: TokenWhitespace, Data: " "},
225 {Type: TokenPunctuator, Data: "|&"},
226 {Type: TokenWhitespace, Data: " "},
227 {Type: TokenPunctuator, Data: "||"},
228 {Type: TokenWhitespace, Data: " "},
229 {Type: TokenPunctuator, Data: "&"},
230 {Type: TokenWhitespace, Data: " "},
231 {Type: TokenPunctuator, Data: "&&"},
232 {Type: TokenWhitespace, Data: " "},
233 {Type: TokenWord, Data: "{}"},
234 {Type: TokenWhitespace, Data: " "},
235 {Type: TokenWord, Data: "+"},
236 {Type: TokenWhitespace, Data: " "},
237 {Type: TokenWord, Data: "="},
238 {Type: TokenWhitespace, Data: " "},
239 {Type: TokenWord, Data: "+="},
240 {Type: TokenWhitespace, Data: " "},
241 {Type: TokenOpenBacktick, Data: "`"},
242 {Type: TokenCloseBacktick, Data: "`"},
243 {Type: TokenWhitespace, Data: " "},
244 {Type: TokenWord, Data: ","},
245 {Type: TokenWhitespace, Data: " "},
246 {Type: TokenPunctuator, Data: "$("},
247 {Type: TokenPunctuator, Data: ")"},
248 {Type: TokenWhitespace, Data: " "},
249 {Type: TokenPunctuator, Data: "$(("},
250 {Type: TokenPunctuator, Data: "))"},
251 {Type: TokenWhitespace, Data: " "},
252 {Type: parser.TokenError, Data: "invalid character"},
253 },
254 },
255 { // 13
256 "$(( + += - -= & &= | |= < << <= > >> >= = == ! != * *= ** / /= % %= ^ ^= ~ ? : , (1) ))",
257 []parser.Token{
258 {Type: TokenPunctuator, Data: "$(("},
259 {Type: TokenWhitespace, Data: " "},
260 {Type: TokenPunctuator, Data: "+"},
261 {Type: TokenWhitespace, Data: " "},
262 {Type: TokenPunctuator, Data: "+="},
263 {Type: TokenWhitespace, Data: " "},
264 {Type: TokenPunctuator, Data: "-"},
265 {Type: TokenWhitespace, Data: " "},
266 {Type: TokenPunctuator, Data: "-="},
267 {Type: TokenWhitespace, Data: " "},
268 {Type: TokenPunctuator, Data: "&"},
269 {Type: TokenWhitespace, Data: " "},
270 {Type: TokenPunctuator, Data: "&="},
271 {Type: TokenWhitespace, Data: " "},
272 {Type: TokenPunctuator, Data: "|"},
273 {Type: TokenWhitespace, Data: " "},
274 {Type: TokenPunctuator, Data: "|="},
275 {Type: TokenWhitespace, Data: " "},
276 {Type: TokenPunctuator, Data: "<"},
277 {Type: TokenWhitespace, Data: " "},
278 {Type: TokenPunctuator, Data: "<<"},
279 {Type: TokenWhitespace, Data: " "},
280 {Type: TokenPunctuator, Data: "<="},
281 {Type: TokenWhitespace, Data: " "},
282 {Type: TokenPunctuator, Data: ">"},
283 {Type: TokenWhitespace, Data: " "},
284 {Type: TokenPunctuator, Data: ">>"},
285 {Type: TokenWhitespace, Data: " "},
286 {Type: TokenPunctuator, Data: ">="},
287 {Type: TokenWhitespace, Data: " "},
288 {Type: TokenPunctuator, Data: "="},
289 {Type: TokenWhitespace, Data: " "},
290 {Type: TokenPunctuator, Data: "=="},
291 {Type: TokenWhitespace, Data: " "},
292 {Type: TokenPunctuator, Data: "!"},
293 {Type: TokenWhitespace, Data: " "},
294 {Type: TokenPunctuator, Data: "!="},
295 {Type: TokenWhitespace, Data: " "},
296 {Type: TokenPunctuator, Data: "*"},
297 {Type: TokenWhitespace, Data: " "},
298 {Type: TokenPunctuator, Data: "*="},
299 {Type: TokenWhitespace, Data: " "},
300 {Type: TokenPunctuator, Data: "**"},
301 {Type: TokenWhitespace, Data: " "},
302 {Type: TokenPunctuator, Data: "/"},
303 {Type: TokenWhitespace, Data: " "},
304 {Type: TokenPunctuator, Data: "/="},
305 {Type: TokenWhitespace, Data: " "},
306 {Type: TokenPunctuator, Data: "%"},
307 {Type: TokenWhitespace, Data: " "},
308 {Type: TokenPunctuator, Data: "%="},
309 {Type: TokenWhitespace, Data: " "},
310 {Type: TokenPunctuator, Data: "^"},
311 {Type: TokenWhitespace, Data: " "},
312 {Type: TokenPunctuator, Data: "^="},
313 {Type: TokenWhitespace, Data: " "},
314 {Type: TokenPunctuator, Data: "~"},
315 {Type: TokenWhitespace, Data: " "},
316 {Type: TokenPunctuator, Data: "?"},
317 {Type: TokenWhitespace, Data: " "},
318 {Type: TokenPunctuator, Data: ":"},
319 {Type: TokenWhitespace, Data: " "},
320 {Type: TokenPunctuator, Data: ","},
321 {Type: TokenWhitespace, Data: " "},
322 {Type: TokenPunctuator, Data: "("},
323 {Type: TokenNumberLiteral, Data: "1"},
324 {Type: TokenPunctuator, Data: ")"},
325 {Type: TokenWhitespace, Data: " "},
326 {Type: TokenPunctuator, Data: "))"},
327 {Type: parser.TokenDone, Data: ""},
328 },
329 },
330 { // 14
331 "$(( a+b 1+2))",
332 []parser.Token{
333 {Type: TokenPunctuator, Data: "$(("},
334 {Type: TokenWhitespace, Data: " "},
335 {Type: TokenWord, Data: "a"},
336 {Type: TokenPunctuator, Data: "+"},
337 {Type: TokenWord, Data: "b"},
338 {Type: TokenWhitespace, Data: " "},
339 {Type: TokenNumberLiteral, Data: "1"},
340 {Type: TokenPunctuator, Data: "+"},
341 {Type: TokenNumberLiteral, Data: "2"},
342 {Type: TokenPunctuator, Data: "))"},
343 {Type: parser.TokenDone, Data: ""},
344 },
345 },
346 { // 15
347 "(( a+b 1+2))",
348 []parser.Token{
349 {Type: TokenPunctuator, Data: "(("},
350 {Type: TokenWhitespace, Data: " "},
351 {Type: TokenWord, Data: "a"},
352 {Type: TokenPunctuator, Data: "+"},
353 {Type: TokenWord, Data: "b"},
354 {Type: TokenWhitespace, Data: " "},
355 {Type: TokenNumberLiteral, Data: "1"},
356 {Type: TokenPunctuator, Data: "+"},
357 {Type: TokenNumberLiteral, Data: "2"},
358 {Type: TokenPunctuator, Data: "))"},
359 {Type: parser.TokenDone, Data: ""},
360 },
361 },
362 { // 16
363 "$(( ( ))",
364 []parser.Token{
365 {Type: TokenPunctuator, Data: "$(("},
366 {Type: TokenWhitespace, Data: " "},
367 {Type: TokenPunctuator, Data: "("},
368 {Type: TokenWhitespace, Data: " "},
369 {Type: TokenPunctuator, Data: ")"},
370 {Type: parser.TokenError, Data: "invalid character"},
371 },
372 },
373 { // 17
374 "$(( ? ))",
375 []parser.Token{
376 {Type: TokenPunctuator, Data: "$(("},
377 {Type: TokenWhitespace, Data: " "},
378 {Type: TokenPunctuator, Data: "?"},
379 {Type: TokenWhitespace, Data: " "},
380 {Type: parser.TokenError, Data: "invalid character"},
381 },
382 },
383 { // 18
384 "$(( ] ))",
385 []parser.Token{
386 {Type: TokenPunctuator, Data: "$(("},
387 {Type: TokenWhitespace, Data: " "},
388 {Type: parser.TokenError, Data: "invalid character"},
389 },
390 },
391 { // 19
392 "{ )",
393 []parser.Token{
394 {Type: TokenPunctuator, Data: "{"},
395 {Type: TokenWhitespace, Data: " "},
396 {Type: parser.TokenError, Data: "invalid character"},
397 },
398 },
399 { // 20
400 "(",
401 []parser.Token{
402 {Type: TokenPunctuator, Data: "("},
403 {Type: parser.TokenError, Data: "unexpected EOF"},
404 },
405 },
406 { // 21
407 "a b(",
408 []parser.Token{
409 {Type: TokenWord, Data: "a"},
410 {Type: TokenWhitespace, Data: " "},
411 {Type: TokenWord, Data: "b"},
412 {Type: parser.TokenError, Data: "invalid character"},
413 },
414 },
415 { // 22
416 "$(",
417 []parser.Token{
418 {Type: TokenPunctuator, Data: "$("},
419 {Type: parser.TokenError, Data: "unexpected EOF"},
420 },
421 },
422 { // 23
423 "$(}",
424 []parser.Token{
425 {Type: TokenPunctuator, Data: "$("},
426 {Type: TokenPunctuator, Data: "}"},
427 {Type: parser.TokenError, Data: "unexpected EOF"},
428 },
429 },
430 { // 24
431 "<<abc\n123\n456\nabc",
432 []parser.Token{
433 {Type: TokenPunctuator, Data: "<<"},
434 {Type: TokenWord, Data: "abc"},
435 {Type: TokenLineTerminator, Data: "\n"},
436 {Type: TokenHeredoc, Data: "123\n456\n"},
437 {Type: TokenHeredocEnd, Data: "abc"},
438 {Type: parser.TokenDone, Data: ""},
439 },
440 },
441 { // 25
442 "<< abc\n123\n456\nabc",
443 []parser.Token{
444 {Type: TokenPunctuator, Data: "<<"},
445 {Type: TokenWhitespace, Data: " "},
446 {Type: TokenWord, Data: "abc"},
447 {Type: TokenLineTerminator, Data: "\n"},
448 {Type: TokenHeredoc, Data: "123\n456\n"},
449 {Type: TokenHeredocEnd, Data: "abc"},
450 {Type: parser.TokenDone, Data: ""},
451 },
452 },
453 { // 26
454 "<<-abc\n123\n456\nabc",
455 []parser.Token{
456 {Type: TokenPunctuator, Data: "<<-"},
457 {Type: TokenWord, Data: "abc"},
458 {Type: TokenLineTerminator, Data: "\n"},
459 {Type: TokenHeredoc, Data: "123\n456\n"},
460 {Type: TokenHeredocEnd, Data: "abc"},
461 {Type: parser.TokenDone, Data: ""},
462 },
463 },
464 { // 27
465 "<<-abc\n\t123\n\t\t456\n\t\t\tabc",
466 []parser.Token{
467 {Type: TokenPunctuator, Data: "<<-"},
468 {Type: TokenWord, Data: "abc"},
469 {Type: TokenLineTerminator, Data: "\n"},
470 {Type: TokenHeredocIndent, Data: "\t"},
471 {Type: TokenHeredoc, Data: "123\n"},
472 {Type: TokenHeredocIndent, Data: "\t\t"},
473 {Type: TokenHeredoc, Data: "456\n"},
474 {Type: TokenHeredocIndent, Data: "\t\t\t"},
475 {Type: TokenHeredocEnd, Data: "abc"},
476 {Type: parser.TokenDone, Data: ""},
477 },
478 },
479 { // 28
480 "<<a'b 'c\n123\n456\nab c\n",
481 []parser.Token{
482 {Type: TokenPunctuator, Data: "<<"},
483 {Type: TokenWord, Data: "a'b 'c"},
484 {Type: TokenLineTerminator, Data: "\n"},
485 {Type: TokenHeredoc, Data: "123\n456\n"},
486 {Type: TokenHeredocEnd, Data: "ab c"},
487 {Type: TokenLineTerminator, Data: "\n"},
488 {Type: parser.TokenDone, Data: ""},
489 },
490 },
491 { // 29
492 "<<def\n123\n456\ndef\nabc",
493 []parser.Token{
494 {Type: TokenPunctuator, Data: "<<"},
495 {Type: TokenWord, Data: "def"},
496 {Type: TokenLineTerminator, Data: "\n"},
497 {Type: TokenHeredoc, Data: "123\n456\n"},
498 {Type: TokenHeredocEnd, Data: "def"},
499 {Type: TokenLineTerminator, Data: "\n"},
500 {Type: TokenWord, Data: "abc"},
501 {Type: parser.TokenDone, Data: ""},
502 },
503 },
504 { // 30
505 "<<def cat\n123\n456\ndef\nabc",
506 []parser.Token{
507 {Type: TokenPunctuator, Data: "<<"},
508 {Type: TokenWord, Data: "def"},
509 {Type: TokenWhitespace, Data: " "},
510 {Type: TokenWord, Data: "cat"},
511 {Type: TokenLineTerminator, Data: "\n"},
512 {Type: TokenHeredoc, Data: "123\n456\n"},
513 {Type: TokenHeredocEnd, Data: "def"},
514 {Type: TokenLineTerminator, Data: "\n"},
515 {Type: TokenWord, Data: "abc"},
516 {Type: parser.TokenDone, Data: ""},
517 },
518 },
519 { // 31
520 "<<abc cat;<<def cat\n123\nabc\n456\ndef",
521 []parser.Token{
522 {Type: TokenPunctuator, Data: "<<"},
523 {Type: TokenWord, Data: "abc"},
524 {Type: TokenWhitespace, Data: " "},
525 {Type: TokenWord, Data: "cat"},
526 {Type: TokenPunctuator, Data: ";"},
527 {Type: TokenPunctuator, Data: "<<"},
528 {Type: TokenWord, Data: "def"},
529 {Type: TokenWhitespace, Data: " "},
530 {Type: TokenWord, Data: "cat"},
531 {Type: TokenLineTerminator, Data: "\n"},
532 {Type: TokenHeredoc, Data: "123\n"},
533 {Type: TokenHeredocEnd, Data: "abc"},
534 {Type: TokenLineTerminator, Data: "\n"},
535 {Type: TokenHeredoc, Data: "456\n"},
536 {Type: TokenHeredocEnd, Data: "def"},
537 {Type: parser.TokenDone, Data: ""},
538 },
539 },
540 { // 32
541 "a <<ABC; b <<DEF\n123\nABC\n456\nDEF",
542 []parser.Token{
543 {Type: TokenWord, Data: "a"},
544 {Type: TokenWhitespace, Data: " "},
545 {Type: TokenPunctuator, Data: "<<"},
546 {Type: TokenWord, Data: "ABC"},
547 {Type: TokenPunctuator, Data: ";"},
548 {Type: TokenWhitespace, Data: " "},
549 {Type: TokenWord, Data: "b"},
550 {Type: TokenWhitespace, Data: " "},
551 {Type: TokenPunctuator, Data: "<<"},
552 {Type: TokenWord, Data: "DEF"},
553 {Type: TokenLineTerminator, Data: "\n"},
554 {Type: TokenHeredoc, Data: "123\n"},
555 {Type: TokenHeredocEnd, Data: "ABC"},
556 {Type: TokenLineTerminator, Data: "\n"},
557 {Type: TokenHeredoc, Data: "456\n"},
558 {Type: TokenHeredocEnd, Data: "DEF"},
559 {Type: parser.TokenDone, Data: ""},
560 },
561 },
562 { // 33
563 "<<abc cat;echo $(<<def cat\n456\ndef\n)\n123\nabc",
564 []parser.Token{
565 {Type: TokenPunctuator, Data: "<<"},
566 {Type: TokenWord, Data: "abc"},
567 {Type: TokenWhitespace, Data: " "},
568 {Type: TokenWord, Data: "cat"},
569 {Type: TokenPunctuator, Data: ";"},
570 {Type: TokenWord, Data: "echo"},
571 {Type: TokenWhitespace, Data: " "},
572 {Type: TokenPunctuator, Data: "$("},
573 {Type: TokenPunctuator, Data: "<<"},
574 {Type: TokenWord, Data: "def"},
575 {Type: TokenWhitespace, Data: " "},
576 {Type: TokenWord, Data: "cat"},
577 {Type: TokenLineTerminator, Data: "\n"},
578 {Type: TokenHeredoc, Data: "456\n"},
579 {Type: TokenHeredocEnd, Data: "def"},
580 {Type: TokenLineTerminator, Data: "\n"},
581 {Type: TokenPunctuator, Data: ")"},
582 {Type: TokenLineTerminator, Data: "\n"},
583 {Type: TokenHeredoc, Data: "123\n"},
584 {Type: TokenHeredocEnd, Data: "abc"},
585 {Type: parser.TokenDone, Data: ""},
586 },
587 },
588 { // 34
589 "<<abc\na$abc\nabc",
590 []parser.Token{
591 {Type: TokenPunctuator, Data: "<<"},
592 {Type: TokenWord, Data: "abc"},
593 {Type: TokenLineTerminator, Data: "\n"},
594 {Type: TokenHeredoc, Data: "a"},
595 {Type: TokenIdentifier, Data: "$abc"},
596 {Type: TokenHeredoc, Data: "\n"},
597 {Type: TokenHeredocEnd, Data: "abc"},
598 {Type: parser.TokenDone, Data: ""},
599 },
600 },
601 { // 35
602 "<<'abc'\na$abc\nabc",
603 []parser.Token{
604 {Type: TokenPunctuator, Data: "<<"},
605 {Type: TokenWord, Data: "'abc'"},
606 {Type: TokenLineTerminator, Data: "\n"},
607 {Type: TokenHeredoc, Data: "a$abc\n"},
608 {Type: TokenHeredocEnd, Data: "abc"},
609 {Type: parser.TokenDone, Data: ""},
610 },
611 },
612 { // 36
613 "<<\"\"abc\na$abc\nabc",
614 []parser.Token{
615 {Type: TokenPunctuator, Data: "<<"},
616 {Type: TokenWord, Data: "\"\"abc"},
617 {Type: TokenLineTerminator, Data: "\n"},
618 {Type: TokenHeredoc, Data: "a$abc\n"},
619 {Type: TokenHeredocEnd, Data: "abc"},
620 {Type: parser.TokenDone, Data: ""},
621 },
622 },
623 { // 37
624 "<<a\\ b\\ c\na$abc\na b c",
625 []parser.Token{
626 {Type: TokenPunctuator, Data: "<<"},
627 {Type: TokenWord, Data: "a\\ b\\ c"},
628 {Type: TokenLineTerminator, Data: "\n"},
629 {Type: TokenHeredoc, Data: "a$abc\n"},
630 {Type: TokenHeredocEnd, Data: "a b c"},
631 {Type: parser.TokenDone, Data: ""},
632 },
633 },
634 { // 38
635 "<<abc\na${abc} $99\nabc",
636 []parser.Token{
637 {Type: TokenPunctuator, Data: "<<"},
638 {Type: TokenWord, Data: "abc"},
639 {Type: TokenLineTerminator, Data: "\n"},
640 {Type: TokenHeredoc, Data: "a"},
641 {Type: TokenPunctuator, Data: "${"},
642 {Type: TokenIdentifier, Data: "abc"},
643 {Type: TokenPunctuator, Data: "}"},
644 {Type: TokenHeredoc, Data: " "},
645 {Type: TokenIdentifier, Data: "$9"},
646 {Type: TokenHeredoc, Data: "9\n"},
647 {Type: TokenHeredocEnd, Data: "abc"},
648 {Type: parser.TokenDone, Data: ""},
649 },
650 },
651 { // 39
652 "<<abc\na$(\necho abc;\n) 1\nabc",
653 []parser.Token{
654 {Type: TokenPunctuator, Data: "<<"},
655 {Type: TokenWord, Data: "abc"},
656 {Type: TokenLineTerminator, Data: "\n"},
657 {Type: TokenHeredoc, Data: "a"},
658 {Type: TokenPunctuator, Data: "$("},
659 {Type: TokenLineTerminator, Data: "\n"},
660 {Type: TokenWord, Data: "echo"},
661 {Type: TokenWhitespace, Data: " "},
662 {Type: TokenWord, Data: "abc"},
663 {Type: TokenPunctuator, Data: ";"},
664 {Type: TokenLineTerminator, Data: "\n"},
665 {Type: TokenPunctuator, Data: ")"},
666 {Type: TokenHeredoc, Data: " 1\n"},
667 {Type: TokenHeredocEnd, Data: "abc"},
668 {Type: parser.TokenDone, Data: ""},
669 },
670 },
671 { // 40
672 "<<abc\n$a\nabc",
673 []parser.Token{
674 {Type: TokenPunctuator, Data: "<<"},
675 {Type: TokenWord, Data: "abc"},
676 {Type: TokenLineTerminator, Data: "\n"},
677 {Type: TokenIdentifier, Data: "$a"},
678 {Type: TokenHeredoc, Data: "\n"},
679 {Type: TokenHeredocEnd, Data: "abc"},
680 {Type: parser.TokenDone, Data: ""},
681 },
682 },
683 { // 41
684 "<<abc\n$$\nabc",
685 []parser.Token{
686 {Type: TokenPunctuator, Data: "<<"},
687 {Type: TokenWord, Data: "abc"},
688 {Type: TokenLineTerminator, Data: "\n"},
689 {Type: TokenIdentifier, Data: "$$"},
690 {Type: TokenHeredoc, Data: "\n"},
691 {Type: TokenHeredocEnd, Data: "abc"},
692 {Type: parser.TokenDone, Data: ""},
693 },
694 },
695 { // 42
696 "<<abc\n$!\nabc",
697 []parser.Token{
698 {Type: TokenPunctuator, Data: "<<"},
699 {Type: TokenWord, Data: "abc"},
700 {Type: TokenLineTerminator, Data: "\n"},
701 {Type: TokenIdentifier, Data: "$!"},
702 {Type: TokenHeredoc, Data: "\n"},
703 {Type: TokenHeredocEnd, Data: "abc"},
704 {Type: parser.TokenDone, Data: ""},
705 },
706 },
707 { // 43
708 "<<abc\n$?\nabc",
709 []parser.Token{
710 {Type: TokenPunctuator, Data: "<<"},
711 {Type: TokenWord, Data: "abc"},
712 {Type: TokenLineTerminator, Data: "\n"},
713 {Type: TokenIdentifier, Data: "$?"},
714 {Type: TokenHeredoc, Data: "\n"},
715 {Type: TokenHeredocEnd, Data: "abc"},
716 {Type: parser.TokenDone, Data: ""},
717 },
718 },
719 { // 44
720 "<<abc\nabc",
721 []parser.Token{
722 {Type: TokenPunctuator, Data: "<<"},
723 {Type: TokenWord, Data: "abc"},
724 {Type: TokenLineTerminator, Data: "\n"},
725 {Type: TokenHeredocEnd, Data: "abc"},
726 {Type: parser.TokenDone, Data: ""},
727 },
728 },
729 { // 45
730 "<<abc\na$(<<def) 1\nabc",
731 []parser.Token{
732 {Type: TokenPunctuator, Data: "<<"},
733 {Type: TokenWord, Data: "abc"},
734 {Type: TokenLineTerminator, Data: "\n"},
735 {Type: TokenHeredoc, Data: "a"},
736 {Type: TokenPunctuator, Data: "$("},
737 {Type: TokenPunctuator, Data: "<<"},
738 {Type: TokenWord, Data: "def"},
739 {Type: parser.TokenError, Data: "invalid character"},
740 },
741 },
742 { // 46
743 "<<abc\na$(<<def cat) 1\nabc",
744 []parser.Token{
745 {Type: TokenPunctuator, Data: "<<"},
746 {Type: TokenWord, Data: "abc"},
747 {Type: TokenLineTerminator, Data: "\n"},
748 {Type: TokenHeredoc, Data: "a"},
749 {Type: TokenPunctuator, Data: "$("},
750 {Type: TokenPunctuator, Data: "<<"},
751 {Type: TokenWord, Data: "def"},
752 {Type: TokenWhitespace, Data: " "},
753 {Type: TokenWord, Data: "cat"},
754 {Type: parser.TokenError, Data: "invalid character"},
755 },
756 },
757 { // 47
758 "<<abc;$(<<def cat)\nabc\ndef\nabc",
759 []parser.Token{
760 {Type: TokenPunctuator, Data: "<<"},
761 {Type: TokenWord, Data: "abc"},
762 {Type: TokenPunctuator, Data: ";"},
763 {Type: TokenPunctuator, Data: "$("},
764 {Type: TokenPunctuator, Data: "<<"},
765 {Type: TokenWord, Data: "def"},
766 {Type: TokenWhitespace, Data: " "},
767 {Type: TokenWord, Data: "cat"},
768 {Type: parser.TokenError, Data: "invalid character"},
769 },
770 },
771 { // 48
772 "<<abc;<<def;$(<<ghi;<<jkl\nghi\njkl\n)\nabc\ndef",
773 []parser.Token{
774 {Type: TokenPunctuator, Data: "<<"},
775 {Type: TokenWord, Data: "abc"},
776 {Type: TokenPunctuator, Data: ";"},
777 {Type: TokenPunctuator, Data: "<<"},
778 {Type: TokenWord, Data: "def"},
779 {Type: TokenPunctuator, Data: ";"},
780 {Type: TokenPunctuator, Data: "$("},
781 {Type: TokenPunctuator, Data: "<<"},
782 {Type: TokenWord, Data: "ghi"},
783 {Type: TokenPunctuator, Data: ";"},
784 {Type: TokenPunctuator, Data: "<<"},
785 {Type: TokenWord, Data: "jkl"},
786 {Type: TokenLineTerminator, Data: "\n"},
787 {Type: TokenHeredocEnd, Data: "ghi"},
788 {Type: TokenLineTerminator, Data: "\n"},
789 {Type: TokenHeredocEnd, Data: "jkl"},
790 {Type: TokenLineTerminator, Data: "\n"},
791 {Type: TokenPunctuator, Data: ")"},
792 {Type: TokenLineTerminator, Data: "\n"},
793 {Type: TokenHeredocEnd, Data: "abc"},
794 {Type: TokenLineTerminator, Data: "\n"},
795 {Type: TokenHeredocEnd, Data: "def"},
796 {Type: parser.TokenDone, Data: ""},
797 },
798 },
799 { // 49
800 "<<a\\\nbc\nabc\ndef\na\nbc",
801 []parser.Token{
802 {Type: TokenPunctuator, Data: "<<"},
803 {Type: TokenWord, Data: "a\\\nbc"},
804 {Type: TokenLineTerminator, Data: "\n"},
805 {Type: TokenHeredoc, Data: "abc\ndef\n"},
806 {Type: TokenHeredocEnd, Data: "a\nbc"},
807 {Type: parser.TokenDone, Data: ""},
808 },
809 },
810 { // 50
811 "<<a;echo ${a/b/\n$c #not-a-comment $d}\n123\na",
812 []parser.Token{
813 {Type: TokenPunctuator, Data: "<<"},
814 {Type: TokenWord, Data: "a"},
815 {Type: TokenPunctuator, Data: ";"},
816 {Type: TokenWord, Data: "echo"},
817 {Type: TokenWhitespace, Data: " "},
818 {Type: TokenPunctuator, Data: "${"},
819 {Type: TokenIdentifier, Data: "a"},
820 {Type: TokenPunctuator, Data: "/"},
821 {Type: TokenPattern, Data: "b"},
822 {Type: TokenPunctuator, Data: "/"},
823 {Type: TokenLineTerminator, Data: "\n"},
824 {Type: TokenIdentifier, Data: "$c"},
825 {Type: TokenWhitespace, Data: " "},
826 {Type: TokenWord, Data: "#not-a-comment"},
827 {Type: TokenWhitespace, Data: " "},
828 {Type: TokenIdentifier, Data: "$d"},
829 {Type: TokenPunctuator, Data: "}"},
830 {Type: TokenLineTerminator, Data: "\n"},
831 {Type: TokenHeredoc, Data: "123\n"},
832 {Type: TokenHeredocEnd, Data: "a"},
833 {Type: parser.TokenDone, Data: ""},
834 },
835 },
836 { // 51
837 "2>1 word",
838 []parser.Token{
839 {Type: TokenNumberLiteral, Data: "2"},
840 {Type: TokenPunctuator, Data: ">"},
841 {Type: TokenWord, Data: "1"},
842 {Type: TokenWhitespace, Data: " "},
843 {Type: TokenWord, Data: "word"},
844 {Type: parser.TokenDone, Data: ""},
845 },
846 },
847 { // 52
848 "time -p cmd",
849 []parser.Token{
850 {Type: TokenKeyword, Data: "time"},
851 {Type: TokenWhitespace, Data: " "},
852 {Type: TokenWord, Data: "-p"},
853 {Type: TokenWhitespace, Data: " "},
854 {Type: TokenWord, Data: "cmd"},
855 {Type: parser.TokenDone, Data: ""},
856 },
857 },
858 { // 53
859 "time -p cmd if",
860 []parser.Token{
861 {Type: TokenKeyword, Data: "time"},
862 {Type: TokenWhitespace, Data: " "},
863 {Type: TokenWord, Data: "-p"},
864 {Type: TokenWhitespace, Data: " "},
865 {Type: TokenWord, Data: "cmd"},
866 {Type: TokenWhitespace, Data: " "},
867 {Type: TokenWord, Data: "if"},
868 {Type: parser.TokenDone, Data: ""},
869 },
870 },
871 { // 54
872 "time -p if a;then b;fi",
873 []parser.Token{
874 {Type: TokenKeyword, Data: "time"},
875 {Type: TokenWhitespace, Data: " "},
876 {Type: TokenWord, Data: "-p"},
877 {Type: TokenWhitespace, Data: " "},
878 {Type: TokenKeyword, Data: "if"},
879 {Type: TokenWhitespace, Data: " "},
880 {Type: TokenWord, Data: "a"},
881 {Type: TokenPunctuator, Data: ";"},
882 {Type: TokenKeyword, Data: "then"},
883 {Type: TokenWhitespace, Data: " "},
884 {Type: TokenWord, Data: "b"},
885 {Type: TokenPunctuator, Data: ";"},
886 {Type: TokenKeyword, Data: "fi"},
887 {Type: parser.TokenDone, Data: ""},
888 },
889 },
890 { // 55
891 "{a..b..2} {a,b,d} a{b,c,d}e a{1..4} {2..10..-1} {-1..-100..5} {a..z..-1}",
892 []parser.Token{
893 {Type: TokenBraceSequenceExpansion, Data: "{"},
894 {Type: TokenWord, Data: "a"},
895 {Type: TokenPunctuator, Data: ".."},
896 {Type: TokenWord, Data: "b"},
897 {Type: TokenPunctuator, Data: ".."},
898 {Type: TokenNumberLiteral, Data: "2"},
899 {Type: TokenBraceExpansion, Data: "}"},
900 {Type: TokenWhitespace, Data: " "},
901 {Type: TokenBraceExpansion, Data: "{"},
902 {Type: TokenWord, Data: "a"},
903 {Type: TokenPunctuator, Data: ","},
904 {Type: TokenWord, Data: "b"},
905 {Type: TokenPunctuator, Data: ","},
906 {Type: TokenWord, Data: "d"},
907 {Type: TokenBraceExpansion, Data: "}"},
908 {Type: TokenWhitespace, Data: " "},
909 {Type: TokenWord, Data: "a"},
910 {Type: TokenBraceExpansion, Data: "{"},
911 {Type: TokenWord, Data: "b"},
912 {Type: TokenPunctuator, Data: ","},
913 {Type: TokenWord, Data: "c"},
914 {Type: TokenPunctuator, Data: ","},
915 {Type: TokenWord, Data: "d"},
916 {Type: TokenBraceExpansion, Data: "}"},
917 {Type: TokenWord, Data: "e"},
918 {Type: TokenWhitespace, Data: " "},
919 {Type: TokenWord, Data: "a"},
920 {Type: TokenBraceSequenceExpansion, Data: "{"},
921 {Type: TokenNumberLiteral, Data: "1"},
922 {Type: TokenPunctuator, Data: ".."},
923 {Type: TokenNumberLiteral, Data: "4"},
924 {Type: TokenBraceExpansion, Data: "}"},
925 {Type: TokenWhitespace, Data: " "},
926 {Type: TokenBraceSequenceExpansion, Data: "{"},
927 {Type: TokenNumberLiteral, Data: "2"},
928 {Type: TokenPunctuator, Data: ".."},
929 {Type: TokenNumberLiteral, Data: "10"},
930 {Type: TokenPunctuator, Data: ".."},
931 {Type: TokenNumberLiteral, Data: "-1"},
932 {Type: TokenBraceExpansion, Data: "}"},
933 {Type: TokenWhitespace, Data: " "},
934 {Type: TokenBraceSequenceExpansion, Data: "{"},
935 {Type: TokenNumberLiteral, Data: "-1"},
936 {Type: TokenPunctuator, Data: ".."},
937 {Type: TokenNumberLiteral, Data: "-100"},
938 {Type: TokenPunctuator, Data: ".."},
939 {Type: TokenNumberLiteral, Data: "5"},
940 {Type: TokenBraceExpansion, Data: "}"},
941 {Type: TokenWhitespace, Data: " "},
942 {Type: TokenBraceSequenceExpansion, Data: "{"},
943 {Type: TokenWord, Data: "a"},
944 {Type: TokenPunctuator, Data: ".."},
945 {Type: TokenWord, Data: "z"},
946 {Type: TokenPunctuator, Data: ".."},
947 {Type: TokenNumberLiteral, Data: "-1"},
948 {Type: TokenBraceExpansion, Data: "}"},
949 {Type: parser.TokenDone, Data: ""},
950 },
951 },
952 { // 56
953 "{a..2}",
954 []parser.Token{
955 {Type: TokenWord, Data: "{a..2}"},
956 {Type: parser.TokenDone, Data: ""},
957 },
958 },
959 { // 57
960 "{a..b..c}",
961 []parser.Token{
962 {Type: TokenWord, Data: "{a..b..c}"},
963 {Type: parser.TokenDone, Data: ""},
964 },
965 },
966 { // 58
967 "{a..b2}",
968 []parser.Token{
969 {Type: TokenWord, Data: "{a..b2}"},
970 {Type: parser.TokenDone, Data: ""},
971 },
972 },
973 { // 59
974 "{aa..bb}",
975 []parser.Token{
976 {Type: TokenWord, Data: "{aa..bb}"},
977 {Type: parser.TokenDone, Data: ""},
978 },
979 },
980 { // 60
981 "{_a,_b,_c}",
982 []parser.Token{
983 {Type: TokenBraceExpansion, Data: "{"},
984 {Type: TokenWord, Data: "_a"},
985 {Type: TokenPunctuator, Data: ","},
986 {Type: TokenWord, Data: "_b"},
987 {Type: TokenPunctuator, Data: ","},
988 {Type: TokenWord, Data: "_c"},
989 {Type: TokenBraceExpansion, Data: "}"},
990 {Type: parser.TokenDone, Data: ""},
991 },
992 },
993 { // 61
994 "{1,2,3}",
995 []parser.Token{
996 {Type: TokenBraceExpansion, Data: "{"},
997 {Type: TokenWord, Data: "1"},
998 {Type: TokenPunctuator, Data: ","},
999 {Type: TokenWord, Data: "2"},
1000 {Type: TokenPunctuator, Data: ","},
1001 {Type: TokenWord, Data: "3"},
1002 {Type: TokenBraceExpansion, Data: "}"},
1003 {Type: parser.TokenDone, Data: ""},
1004 },
1005 },
1006 { // 62
1007 "{\"1\",\"2\",\"3\"}",
1008 []parser.Token{
1009 {Type: TokenBraceExpansion, Data: "{"},
1010 {Type: TokenString, Data: "\"1\""},
1011 {Type: TokenPunctuator, Data: ","},
1012 {Type: TokenString, Data: "\"2\""},
1013 {Type: TokenPunctuator, Data: ","},
1014 {Type: TokenString, Data: "\"3\""},
1015 {Type: TokenBraceExpansion, Data: "}"},
1016 {Type: parser.TokenDone, Data: ""},
1017 },
1018 },
1019 { // 63
1020 "{1..}",
1021 []parser.Token{
1022 {Type: TokenWord, Data: "{1..}"},
1023 {Type: parser.TokenDone, Data: ""},
1024 },
1025 },
1026 { // 64
1027 "{1..3..a}",
1028 []parser.Token{
1029 {Type: TokenWord, Data: "{1..3..a}"},
1030 {Type: parser.TokenDone, Data: ""},
1031 },
1032 },
1033 { // 65
1034 "{1..3..1a}",
1035 []parser.Token{
1036 {Type: TokenWord, Data: "{1..3..1a}"},
1037 {Type: parser.TokenDone, Data: ""},
1038 },
1039 },
1040 { // 66
1041 "{-",
1042 []parser.Token{
1043 {Type: TokenWord, Data: "{-"},
1044 {Type: parser.TokenDone, Data: ""},
1045 },
1046 },
1047 { // 67
1048 "{-,_}",
1049 []parser.Token{
1050 {Type: TokenBraceExpansion, Data: "{"},
1051 {Type: TokenWord, Data: "-"},
1052 {Type: TokenPunctuator, Data: ","},
1053 {Type: TokenWord, Data: "_"},
1054 {Type: TokenBraceExpansion, Data: "}"},
1055 {Type: parser.TokenDone, Data: ""},
1056 },
1057 },
1058 { // 68
1059 "{-\\n}",
1060 []parser.Token{
1061 {Type: TokenWord, Data: "{-\\n}"},
1062 {Type: parser.TokenDone, Data: ""},
1063 },
1064 },
1065 { // 69
1066 "{-,\\n}",
1067 []parser.Token{
1068 {Type: TokenBraceExpansion, Data: "{"},
1069 {Type: TokenWord, Data: "-"},
1070 {Type: TokenPunctuator, Data: ","},
1071 {Type: TokenWord, Data: "\\n"},
1072 {Type: TokenBraceExpansion, Data: "}"},
1073 {Type: parser.TokenDone, Data: ""},
1074 },
1075 },
1076 { // 70
1077 "{a,b;}",
1078 []parser.Token{
1079 {Type: TokenWord, Data: "{a,b"},
1080 {Type: TokenPunctuator, Data: ";"},
1081 {Type: TokenPunctuator, Data: "}"},
1082 {Type: parser.TokenDone, Data: ""},
1083 },
1084 },
1085 { // 71
1086 "{a,b }",
1087 []parser.Token{
1088 {Type: TokenWord, Data: "{a,b"},
1089 {Type: TokenWhitespace, Data: " "},
1090 {Type: TokenPunctuator, Data: "}"},
1091 {Type: parser.TokenDone, Data: ""},
1092 },
1093 },
1094 { // 72
1095 "{a,b\n}",
1096 []parser.Token{
1097 {Type: TokenWord, Data: "{a,b"},
1098 {Type: TokenLineTerminator, Data: "\n"},
1099 {Type: TokenPunctuator, Data: "}"},
1100 {Type: parser.TokenDone, Data: ""},
1101 },
1102 },
1103 { // 73
1104 "a={123",
1105 []parser.Token{
1106 {Type: TokenIdentifierAssign, Data: "a"},
1107 {Type: TokenAssignment, Data: "="},
1108 {Type: TokenWord, Data: "{123"},
1109 {Type: parser.TokenDone, Data: ""},
1110 },
1111 },
1112 { // 74
1113 "word{ word{a} word{\nword{",
1114 []parser.Token{
1115 {Type: TokenWord, Data: "word{"},
1116 {Type: TokenWhitespace, Data: " "},
1117 {Type: TokenWord, Data: "word{a}"},
1118 {Type: TokenWhitespace, Data: " "},
1119 {Type: TokenWord, Data: "word{"},
1120 {Type: TokenLineTerminator, Data: "\n"},
1121 {Type: TokenWord, Data: "word{"},
1122 {Type: parser.TokenDone, Data: ""},
1123 },
1124 },
1125 { // 75
1126 "{ echo 123; echo 456; }",
1127 []parser.Token{
1128 {Type: TokenPunctuator, Data: "{"},
1129 {Type: TokenWhitespace, Data: " "},
1130 {Type: TokenWord, Data: "echo"},
1131 {Type: TokenWhitespace, Data: " "},
1132 {Type: TokenWord, Data: "123"},
1133 {Type: TokenPunctuator, Data: ";"},
1134 {Type: TokenWhitespace, Data: " "},
1135 {Type: TokenWord, Data: "echo"},
1136 {Type: TokenWhitespace, Data: " "},
1137 {Type: TokenWord, Data: "456"},
1138 {Type: TokenPunctuator, Data: ";"},
1139 {Type: TokenWhitespace, Data: " "},
1140 {Type: TokenPunctuator, Data: "}"},
1141 {Type: parser.TokenDone, Data: ""},
1142 },
1143 },
1144 { // 76
1145 "(echo 123; echo 456)",
1146 []parser.Token{
1147 {Type: TokenPunctuator, Data: "("},
1148 {Type: TokenWord, Data: "echo"},
1149 {Type: TokenWhitespace, Data: " "},
1150 {Type: TokenWord, Data: "123"},
1151 {Type: TokenPunctuator, Data: ";"},
1152 {Type: TokenWhitespace, Data: " "},
1153 {Type: TokenWord, Data: "echo"},
1154 {Type: TokenWhitespace, Data: " "},
1155 {Type: TokenWord, Data: "456"},
1156 {Type: TokenPunctuator, Data: ")"},
1157 {Type: parser.TokenDone, Data: ""},
1158 },
1159 },
1160 { // 77
1161 "`a` `echo \\`abc\\`` echo \"a`echo \"1\\`echo u\\\\\\`echo 123\\\\\\`v\\`3\"`c\"",
1162 []parser.Token{
1163 {Type: TokenOpenBacktick, Data: "`"},
1164 {Type: TokenWord, Data: "a"},
1165 {Type: TokenCloseBacktick, Data: "`"},
1166 {Type: TokenWhitespace, Data: " "},
1167 {Type: TokenOpenBacktick, Data: "`"},
1168 {Type: TokenWord, Data: "echo"},
1169 {Type: TokenWhitespace, Data: " "},
1170 {Type: TokenOpenBacktick, Data: "\\`"},
1171 {Type: TokenWord, Data: "abc"},
1172 {Type: TokenCloseBacktick, Data: "\\`"},
1173 {Type: TokenCloseBacktick, Data: "`"},
1174 {Type: TokenWhitespace, Data: " "},
1175 {Type: TokenWord, Data: "echo"},
1176 {Type: TokenWhitespace, Data: " "},
1177 {Type: TokenStringStart, Data: "\"a"},
1178 {Type: TokenOpenBacktick, Data: "`"},
1179 {Type: TokenWord, Data: "echo"},
1180 {Type: TokenWhitespace, Data: " "},
1181 {Type: TokenStringStart, Data: "\"1"},
1182 {Type: TokenOpenBacktick, Data: "\\`"},
1183 {Type: TokenWord, Data: "echo"},
1184 {Type: TokenWhitespace, Data: " "},
1185 {Type: TokenWord, Data: "u"},
1186 {Type: TokenOpenBacktick, Data: "\\\\\\`"},
1187 {Type: TokenWord, Data: "echo"},
1188 {Type: TokenWhitespace, Data: " "},
1189 {Type: TokenWord, Data: "123"},
1190 {Type: TokenCloseBacktick, Data: "\\\\\\`"},
1191 {Type: TokenWord, Data: "v"},
1192 {Type: TokenCloseBacktick, Data: "\\`"},
1193 {Type: TokenStringEnd, Data: "3\""},
1194 {Type: TokenCloseBacktick, Data: "`"},
1195 {Type: TokenStringEnd, Data: "c\""},
1196 {Type: parser.TokenDone, Data: ""},
1197 },
1198 },
1199 { // 78
1200 "`\\``",
1201 []parser.Token{
1202 {Type: TokenOpenBacktick, Data: "`"},
1203 {Type: TokenOpenBacktick, Data: "\\`"},
1204 {Type: parser.TokenError, Data: "incorrect backtick depth"},
1205 },
1206 },
1207 { // 79
1208 "`\\`\\\\\\``",
1209 []parser.Token{
1210 {Type: TokenOpenBacktick, Data: "`"},
1211 {Type: TokenOpenBacktick, Data: "\\`"},
1212 {Type: TokenOpenBacktick, Data: "\\\\\\`"},
1213 {Type: parser.TokenError, Data: "incorrect backtick depth"},
1214 },
1215 },
1216 { // 80
1217 "`\\`\\\\\\`\\`",
1218 []parser.Token{
1219 {Type: TokenOpenBacktick, Data: "`"},
1220 {Type: TokenOpenBacktick, Data: "\\`"},
1221 {Type: TokenOpenBacktick, Data: "\\\\\\`"},
1222 {Type: parser.TokenError, Data: "incorrect backtick depth"},
1223 },
1224 },
1225 { // 81
1226 "`\\$abc`",
1227 []parser.Token{
1228 {Type: TokenOpenBacktick, Data: "`"},
1229 {Type: TokenIdentifier, Data: "\\$abc"},
1230 {Type: TokenCloseBacktick, Data: "`"},
1231 {Type: parser.TokenDone, Data: ""},
1232 },
1233 },
1234 { // 82
1235 "echo `echo \\\"abc\\\"`",
1236 []parser.Token{
1237 {Type: TokenWord, Data: "echo"},
1238 {Type: TokenWhitespace, Data: " "},
1239 {Type: TokenOpenBacktick, Data: "`"},
1240 {Type: TokenWord, Data: "echo"},
1241 {Type: TokenWhitespace, Data: " "},
1242 {Type: TokenWord, Data: "\\\"abc\\\""},
1243 {Type: TokenCloseBacktick, Data: "`"},
1244 {Type: parser.TokenDone, Data: ""},
1245 },
1246 },
1247 { // 83
1248 "\\\"abc\\\"",
1249 []parser.Token{
1250 {Type: TokenWord, Data: "\\\"abc\\\""},
1251 {Type: parser.TokenDone, Data: ""},
1252 },
1253 },
1254 { // 84
1255 "\\",
1256 []parser.Token{
1257 {Type: parser.TokenError, Data: "unexpected EOF"},
1258 },
1259 },
1260 { // 85
1261 "{abc}>2",
1262 []parser.Token{
1263 {Type: TokenWord, Data: "{abc}"},
1264 {Type: TokenPunctuator, Data: ">"},
1265 {Type: TokenWord, Data: "2"},
1266 {Type: parser.TokenDone, Data: ""},
1267 },
1268 },
1269 { // 86
1270 "<&1-",
1271 []parser.Token{
1272 {Type: TokenPunctuator, Data: "<&"},
1273 {Type: TokenWord, Data: "1-"},
1274 {Type: parser.TokenDone, Data: ""},
1275 },
1276 },
1277 { // 87
1278 "<(a)",
1279 []parser.Token{
1280 {Type: TokenPunctuator, Data: "<("},
1281 {Type: TokenWord, Data: "a"},
1282 {Type: TokenPunctuator, Data: ")"},
1283 {Type: parser.TokenDone, Data: ""},
1284 },
1285 },
1286 { // 88
1287 "a >(b)",
1288 []parser.Token{
1289 {Type: TokenWord, Data: "a"},
1290 {Type: TokenWhitespace, Data: " "},
1291 {Type: TokenPunctuator, Data: ">("},
1292 {Type: TokenWord, Data: "b"},
1293 {Type: TokenPunctuator, Data: ")"},
1294 {Type: parser.TokenDone, Data: ""},
1295 },
1296 },
1297 { // 89
1298 ": ${!a} ${!a*} ${!a@} ${!a[@]} ${!a[*]} ${a:1:2} ${a: -1 : -2} ${a:1} ${a:-b} ${a:=b} ${a:?a is unset} ${a:+a is set} ${#a} ${#} ${a#b} ${a##b} ${a%b} ${a%%b} ${a/b/c} ${a//b/c} ${a/#b/c} ${a/%b/c} ${a^b} ${a^^b} ${a,b} ${a,,b} ${a@Q} ${a@a} ${a@P}",
1299 []parser.Token{
1300 {Type: TokenWord, Data: ":"},
1301 {Type: TokenWhitespace, Data: " "},
1302 {Type: TokenPunctuator, Data: "${"},
1303 {Type: TokenPunctuator, Data: "!"},
1304 {Type: TokenIdentifier, Data: "a"},
1305 {Type: TokenPunctuator, Data: "}"},
1306 {Type: TokenWhitespace, Data: " "},
1307 {Type: TokenPunctuator, Data: "${"},
1308 {Type: TokenPunctuator, Data: "!"},
1309 {Type: TokenIdentifier, Data: "a"},
1310 {Type: TokenPunctuator, Data: "*"},
1311 {Type: TokenPunctuator, Data: "}"},
1312 {Type: TokenWhitespace, Data: " "},
1313 {Type: TokenPunctuator, Data: "${"},
1314 {Type: TokenPunctuator, Data: "!"},
1315 {Type: TokenIdentifier, Data: "a"},
1316 {Type: TokenPunctuator, Data: "@"},
1317 {Type: TokenPunctuator, Data: "}"},
1318 {Type: TokenWhitespace, Data: " "},
1319 {Type: TokenPunctuator, Data: "${"},
1320 {Type: TokenPunctuator, Data: "!"},
1321 {Type: TokenIdentifier, Data: "a"},
1322 {Type: TokenPunctuator, Data: "["},
1323 {Type: TokenWord, Data: "@"},
1324 {Type: TokenPunctuator, Data: "]"},
1325 {Type: TokenPunctuator, Data: "}"},
1326 {Type: TokenWhitespace, Data: " "},
1327 {Type: TokenPunctuator, Data: "${"},
1328 {Type: TokenPunctuator, Data: "!"},
1329 {Type: TokenIdentifier, Data: "a"},
1330 {Type: TokenPunctuator, Data: "["},
1331 {Type: TokenWord, Data: "*"},
1332 {Type: TokenPunctuator, Data: "]"},
1333 {Type: TokenPunctuator, Data: "}"},
1334 {Type: TokenWhitespace, Data: " "},
1335 {Type: TokenPunctuator, Data: "${"},
1336 {Type: TokenIdentifier, Data: "a"},
1337 {Type: TokenPunctuator, Data: ":"},
1338 {Type: TokenWord, Data: "1"},
1339 {Type: TokenPunctuator, Data: ":"},
1340 {Type: TokenWord, Data: "2"},
1341 {Type: TokenPunctuator, Data: "}"},
1342 {Type: TokenWhitespace, Data: " "},
1343 {Type: TokenPunctuator, Data: "${"},
1344 {Type: TokenIdentifier, Data: "a"},
1345 {Type: TokenPunctuator, Data: ":"},
1346 {Type: TokenWhitespace, Data: " "},
1347 {Type: TokenWord, Data: "-1"},
1348 {Type: TokenWhitespace, Data: " "},
1349 {Type: TokenPunctuator, Data: ":"},
1350 {Type: TokenWhitespace, Data: " "},
1351 {Type: TokenWord, Data: "-2"},
1352 {Type: TokenPunctuator, Data: "}"},
1353 {Type: TokenWhitespace, Data: " "},
1354 {Type: TokenPunctuator, Data: "${"},
1355 {Type: TokenIdentifier, Data: "a"},
1356 {Type: TokenPunctuator, Data: ":"},
1357 {Type: TokenWord, Data: "1"},
1358 {Type: TokenPunctuator, Data: "}"},
1359 {Type: TokenWhitespace, Data: " "},
1360 {Type: TokenPunctuator, Data: "${"},
1361 {Type: TokenIdentifier, Data: "a"},
1362 {Type: TokenPunctuator, Data: ":-"},
1363 {Type: TokenWord, Data: "b"},
1364 {Type: TokenPunctuator, Data: "}"},
1365 {Type: TokenWhitespace, Data: " "},
1366 {Type: TokenPunctuator, Data: "${"},
1367 {Type: TokenIdentifier, Data: "a"},
1368 {Type: TokenPunctuator, Data: ":="},
1369 {Type: TokenWord, Data: "b"},
1370 {Type: TokenPunctuator, Data: "}"},
1371 {Type: TokenWhitespace, Data: " "},
1372 {Type: TokenPunctuator, Data: "${"},
1373 {Type: TokenIdentifier, Data: "a"},
1374 {Type: TokenPunctuator, Data: ":?"},
1375 {Type: TokenWord, Data: "a"},
1376 {Type: TokenWhitespace, Data: " "},
1377 {Type: TokenWord, Data: "is"},
1378 {Type: TokenWhitespace, Data: " "},
1379 {Type: TokenWord, Data: "unset"},
1380 {Type: TokenPunctuator, Data: "}"},
1381 {Type: TokenWhitespace, Data: " "},
1382 {Type: TokenPunctuator, Data: "${"},
1383 {Type: TokenIdentifier, Data: "a"},
1384 {Type: TokenPunctuator, Data: ":+"},
1385 {Type: TokenWord, Data: "a"},
1386 {Type: TokenWhitespace, Data: " "},
1387 {Type: TokenWord, Data: "is"},
1388 {Type: TokenWhitespace, Data: " "},
1389 {Type: TokenWord, Data: "set"},
1390 {Type: TokenPunctuator, Data: "}"},
1391 {Type: TokenWhitespace, Data: " "},
1392 {Type: TokenPunctuator, Data: "${"},
1393 {Type: TokenPunctuator, Data: "#"},
1394 {Type: TokenIdentifier, Data: "a"},
1395 {Type: TokenPunctuator, Data: "}"},
1396 {Type: TokenWhitespace, Data: " "},
1397 {Type: TokenPunctuator, Data: "${"},
1398 {Type: TokenKeyword, Data: "#"},
1399 {Type: TokenPunctuator, Data: "}"},
1400 {Type: TokenWhitespace, Data: " "},
1401 {Type: TokenPunctuator, Data: "${"},
1402 {Type: TokenIdentifier, Data: "a"},
1403 {Type: TokenPunctuator, Data: "#"},
1404 {Type: TokenPattern, Data: "b"},
1405 {Type: TokenPunctuator, Data: "}"},
1406 {Type: TokenWhitespace, Data: " "},
1407 {Type: TokenPunctuator, Data: "${"},
1408 {Type: TokenIdentifier, Data: "a"},
1409 {Type: TokenPunctuator, Data: "##"},
1410 {Type: TokenPattern, Data: "b"},
1411 {Type: TokenPunctuator, Data: "}"},
1412 {Type: TokenWhitespace, Data: " "},
1413 {Type: TokenPunctuator, Data: "${"},
1414 {Type: TokenIdentifier, Data: "a"},
1415 {Type: TokenPunctuator, Data: "%"},
1416 {Type: TokenPattern, Data: "b"},
1417 {Type: TokenPunctuator, Data: "}"},
1418 {Type: TokenWhitespace, Data: " "},
1419 {Type: TokenPunctuator, Data: "${"},
1420 {Type: TokenIdentifier, Data: "a"},
1421 {Type: TokenPunctuator, Data: "%%"},
1422 {Type: TokenPattern, Data: "b"},
1423 {Type: TokenPunctuator, Data: "}"},
1424 {Type: TokenWhitespace, Data: " "},
1425 {Type: TokenPunctuator, Data: "${"},
1426 {Type: TokenIdentifier, Data: "a"},
1427 {Type: TokenPunctuator, Data: "/"},
1428 {Type: TokenPattern, Data: "b"},
1429 {Type: TokenPunctuator, Data: "/"},
1430 {Type: TokenWord, Data: "c"},
1431 {Type: TokenPunctuator, Data: "}"},
1432 {Type: TokenWhitespace, Data: " "},
1433 {Type: TokenPunctuator, Data: "${"},
1434 {Type: TokenIdentifier, Data: "a"},
1435 {Type: TokenPunctuator, Data: "//"},
1436 {Type: TokenPattern, Data: "b"},
1437 {Type: TokenPunctuator, Data: "/"},
1438 {Type: TokenWord, Data: "c"},
1439 {Type: TokenPunctuator, Data: "}"},
1440 {Type: TokenWhitespace, Data: " "},
1441 {Type: TokenPunctuator, Data: "${"},
1442 {Type: TokenIdentifier, Data: "a"},
1443 {Type: TokenPunctuator, Data: "/#"},
1444 {Type: TokenPattern, Data: "b"},
1445 {Type: TokenPunctuator, Data: "/"},
1446 {Type: TokenWord, Data: "c"},
1447 {Type: TokenPunctuator, Data: "}"},
1448 {Type: TokenWhitespace, Data: " "},
1449 {Type: TokenPunctuator, Data: "${"},
1450 {Type: TokenIdentifier, Data: "a"},
1451 {Type: TokenPunctuator, Data: "/%"},
1452 {Type: TokenPattern, Data: "b"},
1453 {Type: TokenPunctuator, Data: "/"},
1454 {Type: TokenWord, Data: "c"},
1455 {Type: TokenPunctuator, Data: "}"},
1456 {Type: TokenWhitespace, Data: " "},
1457 {Type: TokenPunctuator, Data: "${"},
1458 {Type: TokenIdentifier, Data: "a"},
1459 {Type: TokenPunctuator, Data: "^"},
1460 {Type: TokenPattern, Data: "b"},
1461 {Type: TokenPunctuator, Data: "}"},
1462 {Type: TokenWhitespace, Data: " "},
1463 {Type: TokenPunctuator, Data: "${"},
1464 {Type: TokenIdentifier, Data: "a"},
1465 {Type: TokenPunctuator, Data: "^^"},
1466 {Type: TokenPattern, Data: "b"},
1467 {Type: TokenPunctuator, Data: "}"},
1468 {Type: TokenWhitespace, Data: " "},
1469 {Type: TokenPunctuator, Data: "${"},
1470 {Type: TokenIdentifier, Data: "a"},
1471 {Type: TokenPunctuator, Data: ","},
1472 {Type: TokenPattern, Data: "b"},
1473 {Type: TokenPunctuator, Data: "}"},
1474 {Type: TokenWhitespace, Data: " "},
1475 {Type: TokenPunctuator, Data: "${"},
1476 {Type: TokenIdentifier, Data: "a"},
1477 {Type: TokenPunctuator, Data: ",,"},
1478 {Type: TokenPattern, Data: "b"},
1479 {Type: TokenPunctuator, Data: "}"},
1480 {Type: TokenWhitespace, Data: " "},
1481 {Type: TokenPunctuator, Data: "${"},
1482 {Type: TokenIdentifier, Data: "a"},
1483 {Type: TokenPunctuator, Data: "@"},
1484 {Type: TokenBraceWord, Data: "Q"},
1485 {Type: TokenPunctuator, Data: "}"},
1486 {Type: TokenWhitespace, Data: " "},
1487 {Type: TokenPunctuator, Data: "${"},
1488 {Type: TokenIdentifier, Data: "a"},
1489 {Type: TokenPunctuator, Data: "@"},
1490 {Type: TokenBraceWord, Data: "a"},
1491 {Type: TokenPunctuator, Data: "}"},
1492 {Type: TokenWhitespace, Data: " "},
1493 {Type: TokenPunctuator, Data: "${"},
1494 {Type: TokenIdentifier, Data: "a"},
1495 {Type: TokenPunctuator, Data: "@"},
1496 {Type: TokenBraceWord, Data: "P"},
1497 {Type: TokenPunctuator, Data: "}"},
1498 {Type: parser.TokenDone, Data: ""},
1499 },
1500 },
1501 { // 90
1502 "${a[@@]}",
1503 []parser.Token{
1504 {Type: TokenPunctuator, Data: "${"},
1505 {Type: TokenIdentifier, Data: "a"},
1506 {Type: TokenPunctuator, Data: "["},
1507 {Type: TokenWord, Data: "@"},
1508 {Type: parser.TokenError, Data: "invalid character"},
1509 },
1510 },
1511 { // 91
1512 "${a/[/c}",
1513 []parser.Token{
1514 {Type: TokenPunctuator, Data: "${"},
1515 {Type: TokenIdentifier, Data: "a"},
1516 {Type: TokenPunctuator, Data: "/"},
1517 {Type: parser.TokenError, Data: "unexpected EOF"},
1518 },
1519 },
1520 { // 92
1521 "${a/\\[/c}",
1522 []parser.Token{
1523 {Type: TokenPunctuator, Data: "${"},
1524 {Type: TokenIdentifier, Data: "a"},
1525 {Type: TokenPunctuator, Data: "/"},
1526 {Type: TokenPattern, Data: "\\["},
1527 {Type: TokenPunctuator, Data: "/"},
1528 {Type: TokenWord, Data: "c"},
1529 {Type: TokenPunctuator, Data: "}"},
1530 {Type: parser.TokenDone, Data: ""},
1531 },
1532 },
1533 { // 93
1534 "${a/[b]/c}",
1535 []parser.Token{
1536 {Type: TokenPunctuator, Data: "${"},
1537 {Type: TokenIdentifier, Data: "a"},
1538 {Type: TokenPunctuator, Data: "/"},
1539 {Type: TokenPattern, Data: "[b]"},
1540 {Type: TokenPunctuator, Data: "/"},
1541 {Type: TokenWord, Data: "c"},
1542 {Type: TokenPunctuator, Data: "}"},
1543 {Type: parser.TokenDone, Data: ""},
1544 },
1545 },
1546 { // 94
1547 "${a/(/c}",
1548 []parser.Token{
1549 {Type: TokenPunctuator, Data: "${"},
1550 {Type: TokenIdentifier, Data: "a"},
1551 {Type: TokenPunctuator, Data: "/"},
1552 {Type: parser.TokenError, Data: "invalid character"},
1553 },
1554 },
1555 { // 95
1556 "${a/\\(/c}",
1557 []parser.Token{
1558 {Type: TokenPunctuator, Data: "${"},
1559 {Type: TokenIdentifier, Data: "a"},
1560 {Type: TokenPunctuator, Data: "/"},
1561 {Type: TokenPattern, Data: "\\("},
1562 {Type: TokenPunctuator, Data: "/"},
1563 {Type: TokenWord, Data: "c"},
1564 {Type: TokenPunctuator, Data: "}"},
1565 {Type: parser.TokenDone, Data: ""},
1566 },
1567 },
1568 { // 96
1569 "${a/(b)/c}",
1570 []parser.Token{
1571 {Type: TokenPunctuator, Data: "${"},
1572 {Type: TokenIdentifier, Data: "a"},
1573 {Type: TokenPunctuator, Data: "/"},
1574 {Type: TokenPattern, Data: "(b)"},
1575 {Type: TokenPunctuator, Data: "/"},
1576 {Type: TokenWord, Data: "c"},
1577 {Type: TokenPunctuator, Data: "}"},
1578 {Type: parser.TokenDone, Data: ""},
1579 },
1580 },
1581 { // 97
1582 "${a@Z}",
1583 []parser.Token{
1584 {Type: TokenPunctuator, Data: "${"},
1585 {Type: TokenIdentifier, Data: "a"},
1586 {Type: TokenPunctuator, Data: "@"},
1587 {Type: parser.TokenError, Data: "invalid parameter expansion"},
1588 },
1589 },
1590 { // 98
1591 "${@} ${*}",
1592 []parser.Token{
1593 {Type: TokenPunctuator, Data: "${"},
1594 {Type: TokenKeyword, Data: "@"},
1595 {Type: TokenPunctuator, Data: "}"},
1596 {Type: TokenWhitespace, Data: " "},
1597 {Type: TokenPunctuator, Data: "${"},
1598 {Type: TokenKeyword, Data: "*"},
1599 {Type: TokenPunctuator, Data: "}"},
1600 {Type: parser.TokenDone, Data: ""},
1601 },
1602 },
1603 { // 99
1604 "$() $(()) `` ${}",
1605 []parser.Token{
1606 {Type: TokenPunctuator, Data: "$("},
1607 {Type: TokenPunctuator, Data: ")"},
1608 {Type: TokenWhitespace, Data: " "},
1609 {Type: TokenPunctuator, Data: "$(("},
1610 {Type: TokenPunctuator, Data: "))"},
1611 {Type: TokenWhitespace, Data: " "},
1612 {Type: TokenOpenBacktick, Data: "`"},
1613 {Type: TokenCloseBacktick, Data: "`"},
1614 {Type: TokenWhitespace, Data: " "},
1615 {Type: TokenPunctuator, Data: "${"},
1616 {Type: parser.TokenError, Data: "invalid parameter expansion"},
1617 },
1618 },
1619 { // 100
1620 "case a in b)c;;esac",
1621 []parser.Token{
1622 {Type: TokenKeyword, Data: "case"},
1623 {Type: TokenWhitespace, Data: " "},
1624 {Type: TokenWord, Data: "a"},
1625 {Type: TokenWhitespace, Data: " "},
1626 {Type: TokenKeyword, Data: "in"},
1627 {Type: TokenWhitespace, Data: " "},
1628 {Type: TokenWord, Data: "b"},
1629 {Type: TokenPunctuator, Data: ")"},
1630 {Type: TokenWord, Data: "c"},
1631 {Type: TokenPunctuator, Data: ";;"},
1632 {Type: TokenKeyword, Data: "esac"},
1633 {Type: parser.TokenDone, Data: ""},
1634 },
1635 },
1636 { // 101
1637 "case a in b;;esac",
1638 []parser.Token{
1639 {Type: TokenKeyword, Data: "case"},
1640 {Type: TokenWhitespace, Data: " "},
1641 {Type: TokenWord, Data: "a"},
1642 {Type: TokenWhitespace, Data: " "},
1643 {Type: TokenKeyword, Data: "in"},
1644 {Type: TokenWhitespace, Data: " "},
1645 {Type: TokenWord, Data: "b"},
1646 {Type: parser.TokenError, Data: "invalid character"},
1647 },
1648 },
1649 { // 102
1650 "case a in esac",
1651 []parser.Token{
1652 {Type: TokenKeyword, Data: "case"},
1653 {Type: TokenWhitespace, Data: " "},
1654 {Type: TokenWord, Data: "a"},
1655 {Type: TokenWhitespace, Data: " "},
1656 {Type: TokenKeyword, Data: "in"},
1657 {Type: TokenWhitespace, Data: " "},
1658 {Type: TokenKeyword, Data: "esac"},
1659 {Type: parser.TokenDone, Data: ""},
1660 },
1661 },
1662 { // 103
1663 "case a in #comment\nesac",
1664 []parser.Token{
1665 {Type: TokenKeyword, Data: "case"},
1666 {Type: TokenWhitespace, Data: " "},
1667 {Type: TokenWord, Data: "a"},
1668 {Type: TokenWhitespace, Data: " "},
1669 {Type: TokenKeyword, Data: "in"},
1670 {Type: TokenWhitespace, Data: " "},
1671 {Type: TokenComment, Data: "#comment"},
1672 {Type: TokenLineTerminator, Data: "\n"},
1673 {Type: TokenKeyword, Data: "esac"},
1674 {Type: parser.TokenDone, Data: ""},
1675 },
1676 },
1677 { // 104
1678 "case a in b)c;;d)e;&f)g;;&h)i\nesac",
1679 []parser.Token{
1680 {Type: TokenKeyword, Data: "case"},
1681 {Type: TokenWhitespace, Data: " "},
1682 {Type: TokenWord, Data: "a"},
1683 {Type: TokenWhitespace, Data: " "},
1684 {Type: TokenKeyword, Data: "in"},
1685 {Type: TokenWhitespace, Data: " "},
1686 {Type: TokenWord, Data: "b"},
1687 {Type: TokenPunctuator, Data: ")"},
1688 {Type: TokenWord, Data: "c"},
1689 {Type: TokenPunctuator, Data: ";;"},
1690 {Type: TokenWord, Data: "d"},
1691 {Type: TokenPunctuator, Data: ")"},
1692 {Type: TokenWord, Data: "e"},
1693 {Type: TokenPunctuator, Data: ";&"},
1694 {Type: TokenWord, Data: "f"},
1695 {Type: TokenPunctuator, Data: ")"},
1696 {Type: TokenWord, Data: "g"},
1697 {Type: TokenPunctuator, Data: ";;&"},
1698 {Type: TokenWord, Data: "h"},
1699 {Type: TokenPunctuator, Data: ")"},
1700 {Type: TokenWord, Data: "i"},
1701 {Type: TokenLineTerminator, Data: "\n"},
1702 {Type: TokenKeyword, Data: "esac"},
1703 {Type: parser.TokenDone, Data: ""},
1704 },
1705 },
1706 { // 105
1707 "case a in b) #comment\nc;; #comment\nd)e;&f)g\n#comment\nesac",
1708 []parser.Token{
1709 {Type: TokenKeyword, Data: "case"},
1710 {Type: TokenWhitespace, Data: " "},
1711 {Type: TokenWord, Data: "a"},
1712 {Type: TokenWhitespace, Data: " "},
1713 {Type: TokenKeyword, Data: "in"},
1714 {Type: TokenWhitespace, Data: " "},
1715 {Type: TokenWord, Data: "b"},
1716 {Type: TokenPunctuator, Data: ")"},
1717 {Type: TokenWhitespace, Data: " "},
1718 {Type: TokenComment, Data: "#comment"},
1719 {Type: TokenLineTerminator, Data: "\n"},
1720 {Type: TokenWord, Data: "c"},
1721 {Type: TokenPunctuator, Data: ";;"},
1722 {Type: TokenWhitespace, Data: " "},
1723 {Type: TokenComment, Data: "#comment"},
1724 {Type: TokenLineTerminator, Data: "\n"},
1725 {Type: TokenWord, Data: "d"},
1726 {Type: TokenPunctuator, Data: ")"},
1727 {Type: TokenWord, Data: "e"},
1728 {Type: TokenPunctuator, Data: ";&"},
1729 {Type: TokenWord, Data: "f"},
1730 {Type: TokenPunctuator, Data: ")"},
1731 {Type: TokenWord, Data: "g"},
1732 {Type: TokenLineTerminator, Data: "\n"},
1733 {Type: TokenComment, Data: "#comment"},
1734 {Type: TokenLineTerminator, Data: "\n"},
1735 {Type: TokenKeyword, Data: "esac"},
1736 {Type: parser.TokenDone, Data: ""},
1737 },
1738 },
1739 { // 106
1740 "case a b)c;;esac",
1741 []parser.Token{
1742 {Type: TokenKeyword, Data: "case"},
1743 {Type: TokenWhitespace, Data: " "},
1744 {Type: TokenWord, Data: "a"},
1745 {Type: TokenWhitespace, Data: " "},
1746 {Type: parser.TokenError, Data: "missing in"},
1747 },
1748 },
1749 { // 107
1750 "case a in b)c;;",
1751 []parser.Token{
1752 {Type: TokenKeyword, Data: "case"},
1753 {Type: TokenWhitespace, Data: " "},
1754 {Type: TokenWord, Data: "a"},
1755 {Type: TokenWhitespace, Data: " "},
1756 {Type: TokenKeyword, Data: "in"},
1757 {Type: TokenWhitespace, Data: " "},
1758 {Type: TokenWord, Data: "b"},
1759 {Type: TokenPunctuator, Data: ")"},
1760 {Type: TokenWord, Data: "c"},
1761 {Type: TokenPunctuator, Data: ";;"},
1762 {Type: parser.TokenError, Data: "unexpected EOF"},
1763 },
1764 },
1765 { // 108
1766 "if a; then b; fi",
1767 []parser.Token{
1768 {Type: TokenKeyword, Data: "if"},
1769 {Type: TokenWhitespace, Data: " "},
1770 {Type: TokenWord, Data: "a"},
1771 {Type: TokenPunctuator, Data: ";"},
1772 {Type: TokenWhitespace, Data: " "},
1773 {Type: TokenKeyword, Data: "then"},
1774 {Type: TokenWhitespace, Data: " "},
1775 {Type: TokenWord, Data: "b"},
1776 {Type: TokenPunctuator, Data: ";"},
1777 {Type: TokenWhitespace, Data: " "},
1778 {Type: TokenKeyword, Data: "fi"},
1779 {Type: parser.TokenDone, Data: ""},
1780 },
1781 },
1782 { // 109
1783 "if a;\nthen\nb\nfi",
1784 []parser.Token{
1785 {Type: TokenKeyword, Data: "if"},
1786 {Type: TokenWhitespace, Data: " "},
1787 {Type: TokenWord, Data: "a"},
1788 {Type: TokenPunctuator, Data: ";"},
1789 {Type: TokenLineTerminator, Data: "\n"},
1790 {Type: TokenKeyword, Data: "then"},
1791 {Type: TokenLineTerminator, Data: "\n"},
1792 {Type: TokenWord, Data: "b"},
1793 {Type: TokenLineTerminator, Data: "\n"},
1794 {Type: TokenKeyword, Data: "fi"},
1795 {Type: parser.TokenDone, Data: ""},
1796 },
1797 },
1798 { // 110
1799 "if a && b || c & then d; fi",
1800 []parser.Token{
1801 {Type: TokenKeyword, Data: "if"},
1802 {Type: TokenWhitespace, Data: " "},
1803 {Type: TokenWord, Data: "a"},
1804 {Type: TokenWhitespace, Data: " "},
1805 {Type: TokenPunctuator, Data: "&&"},
1806 {Type: TokenWhitespace, Data: " "},
1807 {Type: TokenWord, Data: "b"},
1808 {Type: TokenWhitespace, Data: " "},
1809 {Type: TokenPunctuator, Data: "||"},
1810 {Type: TokenWhitespace, Data: " "},
1811 {Type: TokenWord, Data: "c"},
1812 {Type: TokenWhitespace, Data: " "},
1813 {Type: TokenPunctuator, Data: "&"},
1814 {Type: TokenWhitespace, Data: " "},
1815 {Type: TokenKeyword, Data: "then"},
1816 {Type: TokenWhitespace, Data: " "},
1817 {Type: TokenWord, Data: "d"},
1818 {Type: TokenPunctuator, Data: ";"},
1819 {Type: TokenWhitespace, Data: " "},
1820 {Type: TokenKeyword, Data: "fi"},
1821 {Type: parser.TokenDone, Data: ""},
1822 },
1823 },
1824 { // 111
1825 "if a; then b; else c; fi",
1826 []parser.Token{
1827 {Type: TokenKeyword, Data: "if"},
1828 {Type: TokenWhitespace, Data: " "},
1829 {Type: TokenWord, Data: "a"},
1830 {Type: TokenPunctuator, Data: ";"},
1831 {Type: TokenWhitespace, Data: " "},
1832 {Type: TokenKeyword, Data: "then"},
1833 {Type: TokenWhitespace, Data: " "},
1834 {Type: TokenWord, Data: "b"},
1835 {Type: TokenPunctuator, Data: ";"},
1836 {Type: TokenWhitespace, Data: " "},
1837 {Type: TokenKeyword, Data: "else"},
1838 {Type: TokenWhitespace, Data: " "},
1839 {Type: TokenWord, Data: "c"},
1840 {Type: TokenPunctuator, Data: ";"},
1841 {Type: TokenWhitespace, Data: " "},
1842 {Type: TokenKeyword, Data: "fi"},
1843 {Type: parser.TokenDone, Data: ""},
1844 },
1845 },
1846 { // 112
1847 "if (a) then b; else c; fi",
1848 []parser.Token{
1849 {Type: TokenKeyword, Data: "if"},
1850 {Type: TokenWhitespace, Data: " "},
1851 {Type: TokenPunctuator, Data: "("},
1852 {Type: TokenWord, Data: "a"},
1853 {Type: TokenPunctuator, Data: ")"},
1854 {Type: TokenWhitespace, Data: " "},
1855 {Type: TokenKeyword, Data: "then"},
1856 {Type: TokenWhitespace, Data: " "},
1857 {Type: TokenWord, Data: "b"},
1858 {Type: TokenPunctuator, Data: ";"},
1859 {Type: TokenWhitespace, Data: " "},
1860 {Type: TokenKeyword, Data: "else"},
1861 {Type: TokenWhitespace, Data: " "},
1862 {Type: TokenWord, Data: "c"},
1863 {Type: TokenPunctuator, Data: ";"},
1864 {Type: TokenWhitespace, Data: " "},
1865 {Type: TokenKeyword, Data: "fi"},
1866 {Type: parser.TokenDone, Data: ""},
1867 },
1868 },
1869 { // 113
1870 "if { a; } then b; else c; fi",
1871 []parser.Token{
1872 {Type: TokenKeyword, Data: "if"},
1873 {Type: TokenWhitespace, Data: " "},
1874 {Type: TokenPunctuator, Data: "{"},
1875 {Type: TokenWhitespace, Data: " "},
1876 {Type: TokenWord, Data: "a"},
1877 {Type: TokenPunctuator, Data: ";"},
1878 {Type: TokenWhitespace, Data: " "},
1879 {Type: TokenPunctuator, Data: "}"},
1880 {Type: TokenWhitespace, Data: " "},
1881 {Type: TokenKeyword, Data: "then"},
1882 {Type: TokenWhitespace, Data: " "},
1883 {Type: TokenWord, Data: "b"},
1884 {Type: TokenPunctuator, Data: ";"},
1885 {Type: TokenWhitespace, Data: " "},
1886 {Type: TokenKeyword, Data: "else"},
1887 {Type: TokenWhitespace, Data: " "},
1888 {Type: TokenWord, Data: "c"},
1889 {Type: TokenPunctuator, Data: ";"},
1890 {Type: TokenWhitespace, Data: " "},
1891 {Type: TokenKeyword, Data: "fi"},
1892 {Type: parser.TokenDone, Data: ""},
1893 },
1894 },
1895 { // 114
1896 "if a #comment\n then b; else #comment\nc; fi",
1897 []parser.Token{
1898 {Type: TokenKeyword, Data: "if"},
1899 {Type: TokenWhitespace, Data: " "},
1900 {Type: TokenWord, Data: "a"},
1901 {Type: TokenWhitespace, Data: " "},
1902 {Type: TokenComment, Data: "#comment"},
1903 {Type: TokenLineTerminator, Data: "\n"},
1904 {Type: TokenWhitespace, Data: " "},
1905 {Type: TokenKeyword, Data: "then"},
1906 {Type: TokenWhitespace, Data: " "},
1907 {Type: TokenWord, Data: "b"},
1908 {Type: TokenPunctuator, Data: ";"},
1909 {Type: TokenWhitespace, Data: " "},
1910 {Type: TokenKeyword, Data: "else"},
1911 {Type: TokenWhitespace, Data: " "},
1912 {Type: TokenComment, Data: "#comment"},
1913 {Type: TokenLineTerminator, Data: "\n"},
1914 {Type: TokenWord, Data: "c"},
1915 {Type: TokenPunctuator, Data: ";"},
1916 {Type: TokenWhitespace, Data: " "},
1917 {Type: TokenKeyword, Data: "fi"},
1918 {Type: parser.TokenDone, Data: ""},
1919 },
1920 },
1921 { // 115
1922 "if a\n#comment\n then b; else\n#comment\nc; fi",
1923 []parser.Token{
1924 {Type: TokenKeyword, Data: "if"},
1925 {Type: TokenWhitespace, Data: " "},
1926 {Type: TokenWord, Data: "a"},
1927 {Type: TokenLineTerminator, Data: "\n"},
1928 {Type: TokenComment, Data: "#comment"},
1929 {Type: TokenLineTerminator, Data: "\n"},
1930 {Type: TokenWhitespace, Data: " "},
1931 {Type: TokenKeyword, Data: "then"},
1932 {Type: TokenWhitespace, Data: " "},
1933 {Type: TokenWord, Data: "b"},
1934 {Type: TokenPunctuator, Data: ";"},
1935 {Type: TokenWhitespace, Data: " "},
1936 {Type: TokenKeyword, Data: "else"},
1937 {Type: TokenLineTerminator, Data: "\n"},
1938 {Type: TokenComment, Data: "#comment"},
1939 {Type: TokenLineTerminator, Data: "\n"},
1940 {Type: TokenWord, Data: "c"},
1941 {Type: TokenPunctuator, Data: ";"},
1942 {Type: TokenWhitespace, Data: " "},
1943 {Type: TokenKeyword, Data: "fi"},
1944 {Type: parser.TokenDone, Data: ""},
1945 },
1946 },
1947 { // 116
1948 "if a; then b; elif c; then d; else if e; then f; fi; fi",
1949 []parser.Token{
1950 {Type: TokenKeyword, Data: "if"},
1951 {Type: TokenWhitespace, Data: " "},
1952 {Type: TokenWord, Data: "a"},
1953 {Type: TokenPunctuator, Data: ";"},
1954 {Type: TokenWhitespace, Data: " "},
1955 {Type: TokenKeyword, Data: "then"},
1956 {Type: TokenWhitespace, Data: " "},
1957 {Type: TokenWord, Data: "b"},
1958 {Type: TokenPunctuator, Data: ";"},
1959 {Type: TokenWhitespace, Data: " "},
1960 {Type: TokenKeyword, Data: "elif"},
1961 {Type: TokenWhitespace, Data: " "},
1962 {Type: TokenWord, Data: "c"},
1963 {Type: TokenPunctuator, Data: ";"},
1964 {Type: TokenWhitespace, Data: " "},
1965 {Type: TokenKeyword, Data: "then"},
1966 {Type: TokenWhitespace, Data: " "},
1967 {Type: TokenWord, Data: "d"},
1968 {Type: TokenPunctuator, Data: ";"},
1969 {Type: TokenWhitespace, Data: " "},
1970 {Type: TokenKeyword, Data: "else"},
1971 {Type: TokenWhitespace, Data: " "},
1972 {Type: TokenKeyword, Data: "if"},
1973 {Type: TokenWhitespace, Data: " "},
1974 {Type: TokenWord, Data: "e"},
1975 {Type: TokenPunctuator, Data: ";"},
1976 {Type: TokenWhitespace, Data: " "},
1977 {Type: TokenKeyword, Data: "then"},
1978 {Type: TokenWhitespace, Data: " "},
1979 {Type: TokenWord, Data: "f"},
1980 {Type: TokenPunctuator, Data: ";"},
1981 {Type: TokenWhitespace, Data: " "},
1982 {Type: TokenKeyword, Data: "fi"},
1983 {Type: TokenPunctuator, Data: ";"},
1984 {Type: TokenWhitespace, Data: " "},
1985 {Type: TokenKeyword, Data: "fi"},
1986 {Type: parser.TokenDone, Data: ""},
1987 },
1988 },
1989 { // 117
1990 "while a; do b; done",
1991 []parser.Token{
1992 {Type: TokenKeyword, Data: "while"},
1993 {Type: TokenWhitespace, Data: " "},
1994 {Type: TokenWord, Data: "a"},
1995 {Type: TokenPunctuator, Data: ";"},
1996 {Type: TokenWhitespace, Data: " "},
1997 {Type: TokenKeyword, Data: "do"},
1998 {Type: TokenWhitespace, Data: " "},
1999 {Type: TokenWord, Data: "b"},
2000 {Type: TokenPunctuator, Data: ";"},
2001 {Type: TokenWhitespace, Data: " "},
2002 {Type: TokenKeyword, Data: "done"},
2003 {Type: parser.TokenDone, Data: ""},
2004 },
2005 },
2006 { // 118
2007 "while (a) do b; done",
2008 []parser.Token{
2009 {Type: TokenKeyword, Data: "while"},
2010 {Type: TokenWhitespace, Data: " "},
2011 {Type: TokenPunctuator, Data: "("},
2012 {Type: TokenWord, Data: "a"},
2013 {Type: TokenPunctuator, Data: ")"},
2014 {Type: TokenWhitespace, Data: " "},
2015 {Type: TokenKeyword, Data: "do"},
2016 {Type: TokenWhitespace, Data: " "},
2017 {Type: TokenWord, Data: "b"},
2018 {Type: TokenPunctuator, Data: ";"},
2019 {Type: TokenWhitespace, Data: " "},
2020 {Type: TokenKeyword, Data: "done"},
2021 {Type: parser.TokenDone, Data: ""},
2022 },
2023 },
2024 { // 119
2025 "until (a);do b; done",
2026 []parser.Token{
2027 {Type: TokenKeyword, Data: "until"},
2028 {Type: TokenWhitespace, Data: " "},
2029 {Type: TokenPunctuator, Data: "("},
2030 {Type: TokenWord, Data: "a"},
2031 {Type: TokenPunctuator, Data: ")"},
2032 {Type: TokenPunctuator, Data: ";"},
2033 {Type: TokenKeyword, Data: "do"},
2034 {Type: TokenWhitespace, Data: " "},
2035 {Type: TokenWord, Data: "b"},
2036 {Type: TokenPunctuator, Data: ";"},
2037 {Type: TokenWhitespace, Data: " "},
2038 {Type: TokenKeyword, Data: "done"},
2039 {Type: parser.TokenDone, Data: ""},
2040 },
2041 },
2042 { // 120
2043 "while a; #comment\ndo b; done",
2044 []parser.Token{
2045 {Type: TokenKeyword, Data: "while"},
2046 {Type: TokenWhitespace, Data: " "},
2047 {Type: TokenWord, Data: "a"},
2048 {Type: TokenPunctuator, Data: ";"},
2049 {Type: TokenWhitespace, Data: " "},
2050 {Type: TokenComment, Data: "#comment"},
2051 {Type: TokenLineTerminator, Data: "\n"},
2052 {Type: TokenKeyword, Data: "do"},
2053 {Type: TokenWhitespace, Data: " "},
2054 {Type: TokenWord, Data: "b"},
2055 {Type: TokenPunctuator, Data: ";"},
2056 {Type: TokenWhitespace, Data: " "},
2057 {Type: TokenKeyword, Data: "done"},
2058 {Type: parser.TokenDone, Data: ""},
2059 },
2060 },
2061 { // 121
2062 "until a && b || c & do b; done",
2063 []parser.Token{
2064 {Type: TokenKeyword, Data: "until"},
2065 {Type: TokenWhitespace, Data: " "},
2066 {Type: TokenWord, Data: "a"},
2067 {Type: TokenWhitespace, Data: " "},
2068 {Type: TokenPunctuator, Data: "&&"},
2069 {Type: TokenWhitespace, Data: " "},
2070 {Type: TokenWord, Data: "b"},
2071 {Type: TokenWhitespace, Data: " "},
2072 {Type: TokenPunctuator, Data: "||"},
2073 {Type: TokenWhitespace, Data: " "},
2074 {Type: TokenWord, Data: "c"},
2075 {Type: TokenWhitespace, Data: " "},
2076 {Type: TokenPunctuator, Data: "&"},
2077 {Type: TokenWhitespace, Data: " "},
2078 {Type: TokenKeyword, Data: "do"},
2079 {Type: TokenWhitespace, Data: " "},
2080 {Type: TokenWord, Data: "b"},
2081 {Type: TokenPunctuator, Data: ";"},
2082 {Type: TokenWhitespace, Data: " "},
2083 {Type: TokenKeyword, Data: "done"},
2084 {Type: parser.TokenDone, Data: ""},
2085 },
2086 },
2087 { // 122
2088 "while a; do break; done",
2089 []parser.Token{
2090 {Type: TokenKeyword, Data: "while"},
2091 {Type: TokenWhitespace, Data: " "},
2092 {Type: TokenWord, Data: "a"},
2093 {Type: TokenPunctuator, Data: ";"},
2094 {Type: TokenWhitespace, Data: " "},
2095 {Type: TokenKeyword, Data: "do"},
2096 {Type: TokenWhitespace, Data: " "},
2097 {Type: TokenKeyword, Data: "break"},
2098 {Type: TokenPunctuator, Data: ";"},
2099 {Type: TokenWhitespace, Data: " "},
2100 {Type: TokenKeyword, Data: "done"},
2101 {Type: parser.TokenDone, Data: ""},
2102 },
2103 },
2104 { // 123
2105 "until a; do continue; done",
2106 []parser.Token{
2107 {Type: TokenKeyword, Data: "until"},
2108 {Type: TokenWhitespace, Data: " "},
2109 {Type: TokenWord, Data: "a"},
2110 {Type: TokenPunctuator, Data: ";"},
2111 {Type: TokenWhitespace, Data: " "},
2112 {Type: TokenKeyword, Data: "do"},
2113 {Type: TokenWhitespace, Data: " "},
2114 {Type: TokenKeyword, Data: "continue"},
2115 {Type: TokenPunctuator, Data: ";"},
2116 {Type: TokenWhitespace, Data: " "},
2117 {Type: TokenKeyword, Data: "done"},
2118 {Type: parser.TokenDone, Data: ""},
2119 },
2120 },
2121 { // 124
2122 "while a; do if b; then continue; fi; done",
2123 []parser.Token{
2124 {Type: TokenKeyword, Data: "while"},
2125 {Type: TokenWhitespace, Data: " "},
2126 {Type: TokenWord, Data: "a"},
2127 {Type: TokenPunctuator, Data: ";"},
2128 {Type: TokenWhitespace, Data: " "},
2129 {Type: TokenKeyword, Data: "do"},
2130 {Type: TokenWhitespace, Data: " "},
2131 {Type: TokenKeyword, Data: "if"},
2132 {Type: TokenWhitespace, Data: " "},
2133 {Type: TokenWord, Data: "b"},
2134 {Type: TokenPunctuator, Data: ";"},
2135 {Type: TokenWhitespace, Data: " "},
2136 {Type: TokenKeyword, Data: "then"},
2137 {Type: TokenWhitespace, Data: " "},
2138 {Type: TokenKeyword, Data: "continue"},
2139 {Type: TokenPunctuator, Data: ";"},
2140 {Type: TokenWhitespace, Data: " "},
2141 {Type: TokenKeyword, Data: "fi"},
2142 {Type: TokenPunctuator, Data: ";"},
2143 {Type: TokenWhitespace, Data: " "},
2144 {Type: TokenKeyword, Data: "done"},
2145 {Type: parser.TokenDone, Data: ""},
2146 },
2147 },
2148 { // 125
2149 "select a; do case b in c) break; esac; done",
2150 []parser.Token{
2151 {Type: TokenKeyword, Data: "select"},
2152 {Type: TokenWhitespace, Data: " "},
2153 {Type: TokenIdentifier, Data: "a"},
2154 {Type: TokenPunctuator, Data: ";"},
2155 {Type: TokenWhitespace, Data: " "},
2156 {Type: TokenKeyword, Data: "do"},
2157 {Type: TokenWhitespace, Data: " "},
2158 {Type: TokenKeyword, Data: "case"},
2159 {Type: TokenWhitespace, Data: " "},
2160 {Type: TokenWord, Data: "b"},
2161 {Type: TokenWhitespace, Data: " "},
2162 {Type: TokenKeyword, Data: "in"},
2163 {Type: TokenWhitespace, Data: " "},
2164 {Type: TokenWord, Data: "c"},
2165 {Type: TokenPunctuator, Data: ")"},
2166 {Type: TokenWhitespace, Data: " "},
2167 {Type: TokenKeyword, Data: "break"},
2168 {Type: TokenPunctuator, Data: ";"},
2169 {Type: TokenWhitespace, Data: " "},
2170 {Type: TokenKeyword, Data: "esac"},
2171 {Type: TokenPunctuator, Data: ";"},
2172 {Type: TokenWhitespace, Data: " "},
2173 {Type: TokenKeyword, Data: "done"},
2174 {Type: parser.TokenDone, Data: ""},
2175 },
2176 },
2177 { // 126
2178 "break",
2179 []parser.Token{
2180 {Type: parser.TokenError, Data: "invalid keyword"},
2181 },
2182 },
2183 { // 127
2184 "continue",
2185 []parser.Token{
2186 {Type: parser.TokenError, Data: "invalid keyword"},
2187 },
2188 },
2189 { // 128
2190 "for a; do b; done",
2191 []parser.Token{
2192 {Type: TokenKeyword, Data: "for"},
2193 {Type: TokenWhitespace, Data: " "},
2194 {Type: TokenIdentifier, Data: "a"},
2195 {Type: TokenPunctuator, Data: ";"},
2196 {Type: TokenWhitespace, Data: " "},
2197 {Type: TokenKeyword, Data: "do"},
2198 {Type: TokenWhitespace, Data: " "},
2199 {Type: TokenWord, Data: "b"},
2200 {Type: TokenPunctuator, Data: ";"},
2201 {Type: TokenWhitespace, Data: " "},
2202 {Type: TokenKeyword, Data: "done"},
2203 {Type: parser.TokenDone, Data: ""},
2204 },
2205 },
2206 { // 129
2207 "for a #comment\ndo b; done",
2208 []parser.Token{
2209 {Type: TokenKeyword, Data: "for"},
2210 {Type: TokenWhitespace, Data: " "},
2211 {Type: TokenIdentifier, Data: "a"},
2212 {Type: TokenWhitespace, Data: " "},
2213 {Type: TokenComment, Data: "#comment"},
2214 {Type: TokenLineTerminator, Data: "\n"},
2215 {Type: TokenKeyword, Data: "do"},
2216 {Type: TokenWhitespace, Data: " "},
2217 {Type: TokenWord, Data: "b"},
2218 {Type: TokenPunctuator, Data: ";"},
2219 {Type: TokenWhitespace, Data: " "},
2220 {Type: TokenKeyword, Data: "done"},
2221 {Type: parser.TokenDone, Data: ""},
2222 },
2223 },
2224 { // 130
2225 "for a do b; done",
2226 []parser.Token{
2227 {Type: TokenKeyword, Data: "for"},
2228 {Type: TokenWhitespace, Data: " "},
2229 {Type: TokenIdentifier, Data: "a"},
2230 {Type: TokenWhitespace, Data: " "},
2231 {Type: TokenKeyword, Data: "do"},
2232 {Type: TokenWhitespace, Data: " "},
2233 {Type: TokenWord, Data: "b"},
2234 {Type: TokenPunctuator, Data: ";"},
2235 {Type: TokenWhitespace, Data: " "},
2236 {Type: TokenKeyword, Data: "done"},
2237 {Type: parser.TokenDone, Data: ""},
2238 },
2239 },
2240 { // 131
2241 "for a\ndo b; done",
2242 []parser.Token{
2243 {Type: TokenKeyword, Data: "for"},
2244 {Type: TokenWhitespace, Data: " "},
2245 {Type: TokenIdentifier, Data: "a"},
2246 {Type: TokenLineTerminator, Data: "\n"},
2247 {Type: TokenKeyword, Data: "do"},
2248 {Type: TokenWhitespace, Data: " "},
2249 {Type: TokenWord, Data: "b"},
2250 {Type: TokenPunctuator, Data: ";"},
2251 {Type: TokenWhitespace, Data: " "},
2252 {Type: TokenKeyword, Data: "done"},
2253 {Type: parser.TokenDone, Data: ""},
2254 },
2255 },
2256 { // 132
2257 "for a in 1 2 3; do b; done",
2258 []parser.Token{
2259 {Type: TokenKeyword, Data: "for"},
2260 {Type: TokenWhitespace, Data: " "},
2261 {Type: TokenIdentifier, Data: "a"},
2262 {Type: TokenWhitespace, Data: " "},
2263 {Type: TokenKeyword, Data: "in"},
2264 {Type: TokenWhitespace, Data: " "},
2265 {Type: TokenWord, Data: "1"},
2266 {Type: TokenWhitespace, Data: " "},
2267 {Type: TokenWord, Data: "2"},
2268 {Type: TokenWhitespace, Data: " "},
2269 {Type: TokenWord, Data: "3"},
2270 {Type: TokenPunctuator, Data: ";"},
2271 {Type: TokenWhitespace, Data: " "},
2272 {Type: TokenKeyword, Data: "do"},
2273 {Type: TokenWhitespace, Data: " "},
2274 {Type: TokenWord, Data: "b"},
2275 {Type: TokenPunctuator, Data: ";"},
2276 {Type: TokenWhitespace, Data: " "},
2277 {Type: TokenKeyword, Data: "done"},
2278 {Type: parser.TokenDone, Data: ""},
2279 },
2280 },
2281 { // 133
2282 "for a in 1 2 3 #comment\ndo b; done",
2283 []parser.Token{
2284 {Type: TokenKeyword, Data: "for"},
2285 {Type: TokenWhitespace, Data: " "},
2286 {Type: TokenIdentifier, Data: "a"},
2287 {Type: TokenWhitespace, Data: " "},
2288 {Type: TokenKeyword, Data: "in"},
2289 {Type: TokenWhitespace, Data: " "},
2290 {Type: TokenWord, Data: "1"},
2291 {Type: TokenWhitespace, Data: " "},
2292 {Type: TokenWord, Data: "2"},
2293 {Type: TokenWhitespace, Data: " "},
2294 {Type: TokenWord, Data: "3"},
2295 {Type: TokenWhitespace, Data: " "},
2296 {Type: TokenComment, Data: "#comment"},
2297 {Type: TokenLineTerminator, Data: "\n"},
2298 {Type: TokenKeyword, Data: "do"},
2299 {Type: TokenWhitespace, Data: " "},
2300 {Type: TokenWord, Data: "b"},
2301 {Type: TokenPunctuator, Data: ";"},
2302 {Type: TokenWhitespace, Data: " "},
2303 {Type: TokenKeyword, Data: "done"},
2304 {Type: parser.TokenDone, Data: ""},
2305 },
2306 },
2307 { // 134
2308 "for a #comment\nin 1 2 3\ndo b; done",
2309 []parser.Token{
2310 {Type: TokenKeyword, Data: "for"},
2311 {Type: TokenWhitespace, Data: " "},
2312 {Type: TokenIdentifier, Data: "a"},
2313 {Type: TokenWhitespace, Data: " "},
2314 {Type: TokenComment, Data: "#comment"},
2315 {Type: TokenLineTerminator, Data: "\n"},
2316 {Type: TokenKeyword, Data: "in"},
2317 {Type: TokenWhitespace, Data: " "},
2318 {Type: TokenWord, Data: "1"},
2319 {Type: TokenWhitespace, Data: " "},
2320 {Type: TokenWord, Data: "2"},
2321 {Type: TokenWhitespace, Data: " "},
2322 {Type: TokenWord, Data: "3"},
2323 {Type: TokenLineTerminator, Data: "\n"},
2324 {Type: TokenKeyword, Data: "do"},
2325 {Type: TokenWhitespace, Data: " "},
2326 {Type: TokenWord, Data: "b"},
2327 {Type: TokenPunctuator, Data: ";"},
2328 {Type: TokenWhitespace, Data: " "},
2329 {Type: TokenKeyword, Data: "done"},
2330 {Type: parser.TokenDone, Data: ""},
2331 },
2332 },
2333 { // 135
2334 "for % in 1 2 3; do b; done",
2335 []parser.Token{
2336 {Type: TokenKeyword, Data: "for"},
2337 {Type: TokenWhitespace, Data: " "},
2338 {Type: parser.TokenError, Data: "invalid identifier"},
2339 },
2340 },
2341 { // 136
2342 "for a in 1 2 3 do b; done",
2343 []parser.Token{
2344 {Type: TokenKeyword, Data: "for"},
2345 {Type: TokenWhitespace, Data: " "},
2346 {Type: TokenIdentifier, Data: "a"},
2347 {Type: TokenWhitespace, Data: " "},
2348 {Type: TokenKeyword, Data: "in"},
2349 {Type: TokenWhitespace, Data: " "},
2350 {Type: TokenWord, Data: "1"},
2351 {Type: TokenWhitespace, Data: " "},
2352 {Type: TokenWord, Data: "2"},
2353 {Type: TokenWhitespace, Data: " "},
2354 {Type: TokenWord, Data: "3"},
2355 {Type: TokenWhitespace, Data: " "},
2356 {Type: TokenWord, Data: "do"},
2357 {Type: TokenWhitespace, Data: " "},
2358 {Type: TokenWord, Data: "b"},
2359 {Type: TokenPunctuator, Data: ";"},
2360 {Type: TokenWhitespace, Data: " "},
2361 {Type: parser.TokenError, Data: "missing do"},
2362 },
2363 },
2364 { // 137
2365 "for ((a=1;a<2;a++)) do b; done",
2366 []parser.Token{
2367 {Type: TokenKeyword, Data: "for"},
2368 {Type: TokenWhitespace, Data: " "},
2369 {Type: TokenPunctuator, Data: "(("},
2370 {Type: TokenWord, Data: "a"},
2371 {Type: TokenPunctuator, Data: "="},
2372 {Type: TokenNumberLiteral, Data: "1"},
2373 {Type: TokenPunctuator, Data: ";"},
2374 {Type: TokenWord, Data: "a"},
2375 {Type: TokenPunctuator, Data: "<"},
2376 {Type: TokenNumberLiteral, Data: "2"},
2377 {Type: TokenPunctuator, Data: ";"},
2378 {Type: TokenWord, Data: "a"},
2379 {Type: TokenPunctuator, Data: "++"},
2380 {Type: TokenPunctuator, Data: "))"},
2381 {Type: TokenWhitespace, Data: " "},
2382 {Type: TokenKeyword, Data: "do"},
2383 {Type: TokenWhitespace, Data: " "},
2384 {Type: TokenWord, Data: "b"},
2385 {Type: TokenPunctuator, Data: ";"},
2386 {Type: TokenWhitespace, Data: " "},
2387 {Type: TokenKeyword, Data: "done"},
2388 {Type: parser.TokenDone, Data: ""},
2389 },
2390 },
2391 { // 138
2392 "for ((a=1;a<2;a++)); do b; done",
2393 []parser.Token{
2394 {Type: TokenKeyword, Data: "for"},
2395 {Type: TokenWhitespace, Data: " "},
2396 {Type: TokenPunctuator, Data: "(("},
2397 {Type: TokenWord, Data: "a"},
2398 {Type: TokenPunctuator, Data: "="},
2399 {Type: TokenNumberLiteral, Data: "1"},
2400 {Type: TokenPunctuator, Data: ";"},
2401 {Type: TokenWord, Data: "a"},
2402 {Type: TokenPunctuator, Data: "<"},
2403 {Type: TokenNumberLiteral, Data: "2"},
2404 {Type: TokenPunctuator, Data: ";"},
2405 {Type: TokenWord, Data: "a"},
2406 {Type: TokenPunctuator, Data: "++"},
2407 {Type: TokenPunctuator, Data: "))"},
2408 {Type: TokenPunctuator, Data: ";"},
2409 {Type: TokenWhitespace, Data: " "},
2410 {Type: TokenKeyword, Data: "do"},
2411 {Type: TokenWhitespace, Data: " "},
2412 {Type: TokenWord, Data: "b"},
2413 {Type: TokenPunctuator, Data: ";"},
2414 {Type: TokenWhitespace, Data: " "},
2415 {Type: TokenKeyword, Data: "done"},
2416 {Type: parser.TokenDone, Data: ""},
2417 },
2418 },
2419 { // 139
2420 "for ( a=1;a<2;a++ ); do b; done",
2421 []parser.Token{
2422 {Type: TokenKeyword, Data: "for"},
2423 {Type: TokenWhitespace, Data: " "},
2424 {Type: parser.TokenError, Data: "invalid character"},
2425 },
2426 },
2427 { // 140
2428 "select a; do b; done",
2429 []parser.Token{
2430 {Type: TokenKeyword, Data: "select"},
2431 {Type: TokenWhitespace, Data: " "},
2432 {Type: TokenIdentifier, Data: "a"},
2433 {Type: TokenPunctuator, Data: ";"},
2434 {Type: TokenWhitespace, Data: " "},
2435 {Type: TokenKeyword, Data: "do"},
2436 {Type: TokenWhitespace, Data: " "},
2437 {Type: TokenWord, Data: "b"},
2438 {Type: TokenPunctuator, Data: ";"},
2439 {Type: TokenWhitespace, Data: " "},
2440 {Type: TokenKeyword, Data: "done"},
2441 {Type: parser.TokenDone, Data: ""},
2442 },
2443 },
2444 { // 141
2445 "select a do b; done",
2446 []parser.Token{
2447 {Type: TokenKeyword, Data: "select"},
2448 {Type: TokenWhitespace, Data: " "},
2449 {Type: TokenIdentifier, Data: "a"},
2450 {Type: TokenWhitespace, Data: " "},
2451 {Type: TokenKeyword, Data: "do"},
2452 {Type: TokenWhitespace, Data: " "},
2453 {Type: TokenWord, Data: "b"},
2454 {Type: TokenPunctuator, Data: ";"},
2455 {Type: TokenWhitespace, Data: " "},
2456 {Type: TokenKeyword, Data: "done"},
2457 {Type: parser.TokenDone, Data: ""},
2458 },
2459 },
2460 { // 142
2461 "select a\ndo b; done",
2462 []parser.Token{
2463 {Type: TokenKeyword, Data: "select"},
2464 {Type: TokenWhitespace, Data: " "},
2465 {Type: TokenIdentifier, Data: "a"},
2466 {Type: TokenLineTerminator, Data: "\n"},
2467 {Type: TokenKeyword, Data: "do"},
2468 {Type: TokenWhitespace, Data: " "},
2469 {Type: TokenWord, Data: "b"},
2470 {Type: TokenPunctuator, Data: ";"},
2471 {Type: TokenWhitespace, Data: " "},
2472 {Type: TokenKeyword, Data: "done"},
2473 {Type: parser.TokenDone, Data: ""},
2474 },
2475 },
2476 { // 143
2477 "select a in 1 2 3; do b; done",
2478 []parser.Token{
2479 {Type: TokenKeyword, Data: "select"},
2480 {Type: TokenWhitespace, Data: " "},
2481 {Type: TokenIdentifier, Data: "a"},
2482 {Type: TokenWhitespace, Data: " "},
2483 {Type: TokenKeyword, Data: "in"},
2484 {Type: TokenWhitespace, Data: " "},
2485 {Type: TokenWord, Data: "1"},
2486 {Type: TokenWhitespace, Data: " "},
2487 {Type: TokenWord, Data: "2"},
2488 {Type: TokenWhitespace, Data: " "},
2489 {Type: TokenWord, Data: "3"},
2490 {Type: TokenPunctuator, Data: ";"},
2491 {Type: TokenWhitespace, Data: " "},
2492 {Type: TokenKeyword, Data: "do"},
2493 {Type: TokenWhitespace, Data: " "},
2494 {Type: TokenWord, Data: "b"},
2495 {Type: TokenPunctuator, Data: ";"},
2496 {Type: TokenWhitespace, Data: " "},
2497 {Type: TokenKeyword, Data: "done"},
2498 {Type: parser.TokenDone, Data: ""},
2499 },
2500 },
2501 { // 144
2502 "select a in 1 2 3 do b; done",
2503 []parser.Token{
2504 {Type: TokenKeyword, Data: "select"},
2505 {Type: TokenWhitespace, Data: " "},
2506 {Type: TokenIdentifier, Data: "a"},
2507 {Type: TokenWhitespace, Data: " "},
2508 {Type: TokenKeyword, Data: "in"},
2509 {Type: TokenWhitespace, Data: " "},
2510 {Type: TokenWord, Data: "1"},
2511 {Type: TokenWhitespace, Data: " "},
2512 {Type: TokenWord, Data: "2"},
2513 {Type: TokenWhitespace, Data: " "},
2514 {Type: TokenWord, Data: "3"},
2515 {Type: TokenWhitespace, Data: " "},
2516 {Type: TokenWord, Data: "do"},
2517 {Type: TokenWhitespace, Data: " "},
2518 {Type: TokenWord, Data: "b"},
2519 {Type: TokenPunctuator, Data: ";"},
2520 {Type: TokenWhitespace, Data: " "},
2521 {Type: parser.TokenError, Data: "missing do"},
2522 },
2523 },
2524 { // 145
2525
2526 "coproc a b",
2527 []parser.Token{
2528 {Type: TokenKeyword, Data: "coproc"},
2529 {Type: TokenWhitespace, Data: " "},
2530 {Type: TokenWord, Data: "a"},
2531 {Type: TokenWhitespace, Data: " "},
2532 {Type: TokenWord, Data: "b"},
2533 {Type: parser.TokenDone, Data: ""},
2534 },
2535 },
2536 { // 146
2537
2538 "coproc fora b",
2539 []parser.Token{
2540 {Type: TokenKeyword, Data: "coproc"},
2541 {Type: TokenWhitespace, Data: " "},
2542 {Type: TokenWord, Data: "fora"},
2543 {Type: TokenWhitespace, Data: " "},
2544 {Type: TokenWord, Data: "b"},
2545 {Type: parser.TokenDone, Data: ""},
2546 },
2547 },
2548 { // 147
2549 "coproc while a; do b; done",
2550 []parser.Token{
2551 {Type: TokenKeyword, Data: "coproc"},
2552 {Type: TokenWhitespace, Data: " "},
2553 {Type: TokenKeyword, Data: "while"},
2554 {Type: TokenWhitespace, Data: " "},
2555 {Type: TokenWord, Data: "a"},
2556 {Type: TokenPunctuator, Data: ";"},
2557 {Type: TokenWhitespace, Data: " "},
2558 {Type: TokenKeyword, Data: "do"},
2559 {Type: TokenWhitespace, Data: " "},
2560 {Type: TokenWord, Data: "b"},
2561 {Type: TokenPunctuator, Data: ";"},
2562 {Type: TokenWhitespace, Data: " "},
2563 {Type: TokenKeyword, Data: "done"},
2564 {Type: parser.TokenDone, Data: ""},
2565 },
2566 },
2567 { // 148
2568 "coproc a while b; do c; done",
2569 []parser.Token{
2570 {Type: TokenKeyword, Data: "coproc"},
2571 {Type: TokenWhitespace, Data: " "},
2572 {Type: TokenIdentifier, Data: "a"},
2573 {Type: TokenWhitespace, Data: " "},
2574 {Type: TokenKeyword, Data: "while"},
2575 {Type: TokenWhitespace, Data: " "},
2576 {Type: TokenWord, Data: "b"},
2577 {Type: TokenPunctuator, Data: ";"},
2578 {Type: TokenWhitespace, Data: " "},
2579 {Type: TokenKeyword, Data: "do"},
2580 {Type: TokenWhitespace, Data: " "},
2581 {Type: TokenWord, Data: "c"},
2582 {Type: TokenPunctuator, Data: ";"},
2583 {Type: TokenWhitespace, Data: " "},
2584 {Type: TokenKeyword, Data: "done"},
2585 {Type: parser.TokenDone, Data: ""},
2586 },
2587 },
2588 { // 149
2589 "echo }",
2590 []parser.Token{
2591 {Type: TokenWord, Data: "echo"},
2592 {Type: TokenWhitespace, Data: " "},
2593 {Type: TokenPunctuator, Data: "}"},
2594 {Type: parser.TokenDone, Data: ""},
2595 },
2596 },
2597 { // 150
2598 "{ echo }",
2599 []parser.Token{
2600 {Type: TokenPunctuator, Data: "{"},
2601 {Type: TokenWhitespace, Data: " "},
2602 {Type: TokenWord, Data: "echo"},
2603 {Type: TokenWhitespace, Data: " "},
2604 {Type: TokenPunctuator, Data: "}"},
2605 {Type: parser.TokenError, Data: "unexpected EOF"},
2606 },
2607 },
2608 { // 151
2609 "{ echo };}",
2610 []parser.Token{
2611 {Type: TokenPunctuator, Data: "{"},
2612 {Type: TokenWhitespace, Data: " "},
2613 {Type: TokenWord, Data: "echo"},
2614 {Type: TokenWhitespace, Data: " "},
2615 {Type: TokenPunctuator, Data: "}"},
2616 {Type: TokenPunctuator, Data: ";"},
2617 {Type: TokenPunctuator, Data: "}"},
2618 {Type: parser.TokenDone, Data: ""},
2619 },
2620 },
2621 { // 152
2622 "function a{ b; }",
2623 []parser.Token{
2624 {Type: TokenKeyword, Data: "function"},
2625 {Type: TokenWhitespace, Data: " "},
2626 {Type: TokenFunctionIdentifier, Data: "a"},
2627 {Type: TokenPunctuator, Data: "{"},
2628 {Type: TokenWhitespace, Data: " "},
2629 {Type: TokenWord, Data: "b"},
2630 {Type: TokenPunctuator, Data: ";"},
2631 {Type: TokenWhitespace, Data: " "},
2632 {Type: TokenPunctuator, Data: "}"},
2633 {Type: parser.TokenDone, Data: ""},
2634 },
2635 },
2636 { // 153
2637 "function a{ b; }\nfunction a\n{ b; }",
2638 []parser.Token{
2639 {Type: TokenKeyword, Data: "function"},
2640 {Type: TokenWhitespace, Data: " "},
2641 {Type: TokenFunctionIdentifier, Data: "a"},
2642 {Type: TokenPunctuator, Data: "{"},
2643 {Type: TokenWhitespace, Data: " "},
2644 {Type: TokenWord, Data: "b"},
2645 {Type: TokenPunctuator, Data: ";"},
2646 {Type: TokenWhitespace, Data: " "},
2647 {Type: TokenPunctuator, Data: "}"},
2648 {Type: TokenLineTerminator, Data: "\n"},
2649 {Type: TokenKeyword, Data: "function"},
2650 {Type: TokenWhitespace, Data: " "},
2651 {Type: TokenFunctionIdentifier, Data: "a"},
2652 {Type: TokenLineTerminator, Data: "\n"},
2653 {Type: TokenPunctuator, Data: "{"},
2654 {Type: TokenWhitespace, Data: " "},
2655 {Type: TokenWord, Data: "b"},
2656 {Type: TokenPunctuator, Data: ";"},
2657 {Type: TokenWhitespace, Data: " "},
2658 {Type: TokenPunctuator, Data: "}"},
2659 {Type: parser.TokenDone, Data: ""},
2660 },
2661 },
2662 { // 154
2663 "function a\n{ b; }",
2664 []parser.Token{
2665 {Type: TokenKeyword, Data: "function"},
2666 {Type: TokenWhitespace, Data: " "},
2667 {Type: TokenFunctionIdentifier, Data: "a"},
2668 {Type: TokenLineTerminator, Data: "\n"},
2669 {Type: TokenPunctuator, Data: "{"},
2670 {Type: TokenWhitespace, Data: " "},
2671 {Type: TokenWord, Data: "b"},
2672 {Type: TokenPunctuator, Data: ";"},
2673 {Type: TokenWhitespace, Data: " "},
2674 {Type: TokenPunctuator, Data: "}"},
2675 {Type: parser.TokenDone, Data: ""},
2676 },
2677 },
2678 { // 155
2679 "function a(){ b; }",
2680 []parser.Token{
2681 {Type: TokenKeyword, Data: "function"},
2682 {Type: TokenWhitespace, Data: " "},
2683 {Type: TokenFunctionIdentifier, Data: "a"},
2684 {Type: TokenPunctuator, Data: "("},
2685 {Type: TokenPunctuator, Data: ")"},
2686 {Type: TokenPunctuator, Data: "{"},
2687 {Type: TokenWhitespace, Data: " "},
2688 {Type: TokenWord, Data: "b"},
2689 {Type: TokenPunctuator, Data: ";"},
2690 {Type: TokenWhitespace, Data: " "},
2691 {Type: TokenPunctuator, Data: "}"},
2692 {Type: parser.TokenDone, Data: ""},
2693 },
2694 },
2695 { // 156
2696 "function a ( ) { b; }",
2697 []parser.Token{
2698 {Type: TokenKeyword, Data: "function"},
2699 {Type: TokenWhitespace, Data: " "},
2700 {Type: TokenFunctionIdentifier, Data: "a"},
2701 {Type: TokenWhitespace, Data: " "},
2702 {Type: TokenPunctuator, Data: "("},
2703 {Type: TokenWhitespace, Data: " "},
2704 {Type: TokenPunctuator, Data: ")"},
2705 {Type: TokenWhitespace, Data: " "},
2706 {Type: TokenPunctuator, Data: "{"},
2707 {Type: TokenWhitespace, Data: " "},
2708 {Type: TokenWord, Data: "b"},
2709 {Type: TokenPunctuator, Data: ";"},
2710 {Type: TokenWhitespace, Data: " "},
2711 {Type: TokenPunctuator, Data: "}"},
2712 {Type: parser.TokenDone, Data: ""},
2713 },
2714 },
2715 { // 157
2716 "function a() b",
2717 []parser.Token{
2718 {Type: TokenKeyword, Data: "function"},
2719 {Type: TokenWhitespace, Data: " "},
2720 {Type: TokenFunctionIdentifier, Data: "a"},
2721 {Type: TokenPunctuator, Data: "("},
2722 {Type: TokenPunctuator, Data: ")"},
2723 {Type: TokenWhitespace, Data: " "},
2724 {Type: parser.TokenError, Data: "invalid keyword"},
2725 },
2726 },
2727 { // 158
2728 "a(){ b; }",
2729 []parser.Token{
2730 {Type: TokenFunctionIdentifier, Data: "a"},
2731 {Type: TokenPunctuator, Data: "("},
2732 {Type: TokenPunctuator, Data: ")"},
2733 {Type: TokenPunctuator, Data: "{"},
2734 {Type: TokenWhitespace, Data: " "},
2735 {Type: TokenWord, Data: "b"},
2736 {Type: TokenPunctuator, Data: ";"},
2737 {Type: TokenWhitespace, Data: " "},
2738 {Type: TokenPunctuator, Data: "}"},
2739 {Type: parser.TokenDone, Data: ""},
2740 },
2741 },
2742 { // 159
2743 "a( ) { b; }",
2744 []parser.Token{
2745 {Type: TokenFunctionIdentifier, Data: "a"},
2746 {Type: TokenPunctuator, Data: "("},
2747 {Type: TokenWhitespace, Data: " "},
2748 {Type: TokenPunctuator, Data: ")"},
2749 {Type: TokenWhitespace, Data: " "},
2750 {Type: TokenPunctuator, Data: "{"},
2751 {Type: TokenWhitespace, Data: " "},
2752 {Type: TokenWord, Data: "b"},
2753 {Type: TokenPunctuator, Data: ";"},
2754 {Type: TokenWhitespace, Data: " "},
2755 {Type: TokenPunctuator, Data: "}"},
2756 {Type: parser.TokenDone, Data: ""},
2757 },
2758 },
2759 { // 160
2760 "a() b",
2761 []parser.Token{
2762 {Type: TokenFunctionIdentifier, Data: "a"},
2763 {Type: TokenPunctuator, Data: "("},
2764 {Type: TokenPunctuator, Data: ")"},
2765 {Type: TokenWhitespace, Data: " "},
2766 {Type: parser.TokenError, Data: "invalid keyword"},
2767 },
2768 },
2769 { // 161
2770 "a() b",
2771 []parser.Token{
2772 {Type: TokenFunctionIdentifier, Data: "a"},
2773 {Type: TokenPunctuator, Data: "("},
2774 {Type: TokenPunctuator, Data: ")"},
2775 {Type: TokenWhitespace, Data: " "},
2776 {Type: parser.TokenError, Data: "invalid keyword"},
2777 },
2778 },
2779 { // 162
2780 "[[ -f file ]]",
2781 []parser.Token{
2782 {Type: TokenKeyword, Data: "[["},
2783 {Type: TokenWhitespace, Data: " "},
2784 {Type: TokenKeyword, Data: "-f"},
2785 {Type: TokenWhitespace, Data: " "},
2786 {Type: TokenWord, Data: "file"},
2787 {Type: TokenWhitespace, Data: " "},
2788 {Type: TokenKeyword, Data: "]]"},
2789 {Type: parser.TokenDone, Data: ""},
2790 },
2791 },
2792 { // 163
2793 "[[ ! -e file\"str\" ]]",
2794 []parser.Token{
2795 {Type: TokenKeyword, Data: "[["},
2796 {Type: TokenWhitespace, Data: " "},
2797 {Type: TokenPunctuator, Data: "!"},
2798 {Type: TokenWhitespace, Data: " "},
2799 {Type: TokenKeyword, Data: "-e"},
2800 {Type: TokenWhitespace, Data: " "},
2801 {Type: TokenWord, Data: "file"},
2802 {Type: TokenString, Data: "\"str\""},
2803 {Type: TokenWhitespace, Data: " "},
2804 {Type: TokenKeyword, Data: "]]"},
2805 {Type: parser.TokenDone, Data: ""},
2806 },
2807 },
2808 { // 164
2809 "[[ -S \"str\"a || -g $b\"c\"d ]]",
2810 []parser.Token{
2811 {Type: TokenKeyword, Data: "[["},
2812 {Type: TokenWhitespace, Data: " "},
2813 {Type: TokenKeyword, Data: "-S"},
2814 {Type: TokenWhitespace, Data: " "},
2815 {Type: TokenString, Data: "\"str\""},
2816 {Type: TokenWord, Data: "a"},
2817 {Type: TokenWhitespace, Data: " "},
2818 {Type: TokenPunctuator, Data: "||"},
2819 {Type: TokenWhitespace, Data: " "},
2820 {Type: TokenKeyword, Data: "-g"},
2821 {Type: TokenWhitespace, Data: " "},
2822 {Type: TokenIdentifier, Data: "$b"},
2823 {Type: TokenString, Data: "\"c\""},
2824 {Type: TokenWord, Data: "d"},
2825 {Type: TokenWhitespace, Data: " "},
2826 {Type: TokenKeyword, Data: "]]"},
2827 {Type: parser.TokenDone, Data: ""},
2828 },
2829 },
2830 { // 165
2831 "[[ a = b ]]",
2832 []parser.Token{
2833 {Type: TokenKeyword, Data: "[["},
2834 {Type: TokenWhitespace, Data: " "},
2835 {Type: TokenWord, Data: "a"},
2836 {Type: TokenWhitespace, Data: " "},
2837 {Type: TokenBinaryOperator, Data: "="},
2838 {Type: TokenWhitespace, Data: " "},
2839 {Type: TokenPattern, Data: "b"},
2840 {Type: TokenWhitespace, Data: " "},
2841 {Type: TokenKeyword, Data: "]]"},
2842 {Type: parser.TokenDone, Data: ""},
2843 },
2844 },
2845 { // 166
2846 "[[ a =~ b$ ]]",
2847 []parser.Token{
2848 {Type: TokenKeyword, Data: "[["},
2849 {Type: TokenWhitespace, Data: " "},
2850 {Type: TokenWord, Data: "a"},
2851 {Type: TokenWhitespace, Data: " "},
2852 {Type: TokenBinaryOperator, Data: "=~"},
2853 {Type: TokenWhitespace, Data: " "},
2854 {Type: TokenPattern, Data: "b$"},
2855 {Type: TokenWhitespace, Data: " "},
2856 {Type: TokenKeyword, Data: "]]"},
2857 {Type: parser.TokenDone, Data: ""},
2858 },
2859 },
2860 { // 167
2861 "[[ a =~ ^(19|20)[0-9]{2}$ ]]",
2862 []parser.Token{
2863 {Type: TokenKeyword, Data: "[["},
2864 {Type: TokenWhitespace, Data: " "},
2865 {Type: TokenWord, Data: "a"},
2866 {Type: TokenWhitespace, Data: " "},
2867 {Type: TokenBinaryOperator, Data: "=~"},
2868 {Type: TokenWhitespace, Data: " "},
2869 {Type: TokenPattern, Data: "^(19|20)[0-9]{2}$"},
2870 {Type: TokenWhitespace, Data: " "},
2871 {Type: TokenKeyword, Data: "]]"},
2872 {Type: parser.TokenDone, Data: ""},
2873 },
2874 },
2875 { // 168
2876 "[[ a$b = c\"d\" && e\"f\"g != \"h\"$i ]]",
2877 []parser.Token{
2878 {Type: TokenKeyword, Data: "[["},
2879 {Type: TokenWhitespace, Data: " "},
2880 {Type: TokenWord, Data: "a"},
2881 {Type: TokenIdentifier, Data: "$b"},
2882 {Type: TokenWhitespace, Data: " "},
2883 {Type: TokenBinaryOperator, Data: "="},
2884 {Type: TokenWhitespace, Data: " "},
2885 {Type: TokenPattern, Data: "c"},
2886 {Type: TokenString, Data: "\"d\""},
2887 {Type: TokenWhitespace, Data: " "},
2888 {Type: TokenPunctuator, Data: "&&"},
2889 {Type: TokenWhitespace, Data: " "},
2890 {Type: TokenWord, Data: "e"},
2891 {Type: TokenString, Data: "\"f\""},
2892 {Type: TokenWord, Data: "g"},
2893 {Type: TokenWhitespace, Data: " "},
2894 {Type: TokenBinaryOperator, Data: "!="},
2895 {Type: TokenWhitespace, Data: " "},
2896 {Type: TokenString, Data: "\"h\""},
2897 {Type: TokenIdentifier, Data: "$i"},
2898 {Type: TokenWhitespace, Data: " "},
2899 {Type: TokenKeyword, Data: "]]"},
2900 {Type: parser.TokenDone, Data: ""},
2901 },
2902 },
2903 { // 169
2904 "[[ a -gt b ]]",
2905 []parser.Token{
2906 {Type: TokenKeyword, Data: "[["},
2907 {Type: TokenWhitespace, Data: " "},
2908 {Type: TokenWord, Data: "a"},
2909 {Type: TokenWhitespace, Data: " "},
2910 {Type: TokenKeyword, Data: "-gt"},
2911 {Type: TokenWhitespace, Data: " "},
2912 {Type: TokenWord, Data: "b"},
2913 {Type: TokenWhitespace, Data: " "},
2914 {Type: TokenKeyword, Data: "]]"},
2915 {Type: parser.TokenDone, Data: ""},
2916 },
2917 },
2918 { // 170
2919 "[[ # A\n# B\n\n# C\na -gt b # D\n]]",
2920 []parser.Token{
2921 {Type: TokenKeyword, Data: "[["},
2922 {Type: TokenWhitespace, Data: " "},
2923 {Type: TokenComment, Data: "# A"},
2924 {Type: TokenLineTerminator, Data: "\n"},
2925 {Type: TokenComment, Data: "# B"},
2926 {Type: TokenLineTerminator, Data: "\n\n"},
2927 {Type: TokenComment, Data: "# C"},
2928 {Type: TokenLineTerminator, Data: "\n"},
2929 {Type: TokenWord, Data: "a"},
2930 {Type: TokenWhitespace, Data: " "},
2931 {Type: TokenKeyword, Data: "-gt"},
2932 {Type: TokenWhitespace, Data: " "},
2933 {Type: TokenWord, Data: "b"},
2934 {Type: TokenWhitespace, Data: " "},
2935 {Type: TokenComment, Data: "# D"},
2936 {Type: TokenLineTerminator, Data: "\n"},
2937 {Type: TokenKeyword, Data: "]]"},
2938 {Type: parser.TokenDone, Data: ""},
2939 },
2940 },
2941 { // 171
2942 "[[ a$b -eq c\"d\" && e\"f\"g -ne \"h\"$i ]]",
2943 []parser.Token{
2944 {Type: TokenKeyword, Data: "[["},
2945 {Type: TokenWhitespace, Data: " "},
2946 {Type: TokenWord, Data: "a"},
2947 {Type: TokenIdentifier, Data: "$b"},
2948 {Type: TokenWhitespace, Data: " "},
2949 {Type: TokenKeyword, Data: "-eq"},
2950 {Type: TokenWhitespace, Data: " "},
2951 {Type: TokenWord, Data: "c"},
2952 {Type: TokenString, Data: "\"d\""},
2953 {Type: TokenWhitespace, Data: " "},
2954 {Type: TokenPunctuator, Data: "&&"},
2955 {Type: TokenWhitespace, Data: " "},
2956 {Type: TokenWord, Data: "e"},
2957 {Type: TokenString, Data: "\"f\""},
2958 {Type: TokenWord, Data: "g"},
2959 {Type: TokenWhitespace, Data: " "},
2960 {Type: TokenKeyword, Data: "-ne"},
2961 {Type: TokenWhitespace, Data: " "},
2962 {Type: TokenString, Data: "\"h\""},
2963 {Type: TokenIdentifier, Data: "$i"},
2964 {Type: TokenWhitespace, Data: " "},
2965 {Type: TokenKeyword, Data: "]]"},
2966 {Type: parser.TokenDone, Data: ""},
2967 },
2968 },
2969 { // 172
2970 "[[ (a = b || c = d) && $e -le $f ]]",
2971 []parser.Token{
2972 {Type: TokenKeyword, Data: "[["},
2973 {Type: TokenWhitespace, Data: " "},
2974 {Type: TokenPunctuator, Data: "("},
2975 {Type: TokenWord, Data: "a"},
2976 {Type: TokenWhitespace, Data: " "},
2977 {Type: TokenBinaryOperator, Data: "="},
2978 {Type: TokenWhitespace, Data: " "},
2979 {Type: TokenPattern, Data: "b"},
2980 {Type: TokenWhitespace, Data: " "},
2981 {Type: TokenPunctuator, Data: "||"},
2982 {Type: TokenWhitespace, Data: " "},
2983 {Type: TokenWord, Data: "c"},
2984 {Type: TokenWhitespace, Data: " "},
2985 {Type: TokenBinaryOperator, Data: "="},
2986 {Type: TokenWhitespace, Data: " "},
2987 {Type: TokenPattern, Data: "d"},
2988 {Type: TokenPunctuator, Data: ")"},
2989 {Type: TokenWhitespace, Data: " "},
2990 {Type: TokenPunctuator, Data: "&&"},
2991 {Type: TokenWhitespace, Data: " "},
2992 {Type: TokenIdentifier, Data: "$e"},
2993 {Type: TokenWhitespace, Data: " "},
2994 {Type: TokenKeyword, Data: "-le"},
2995 {Type: TokenWhitespace, Data: " "},
2996 {Type: TokenIdentifier, Data: "$f"},
2997 {Type: TokenWhitespace, Data: " "},
2998 {Type: TokenKeyword, Data: "]]"},
2999 {Type: parser.TokenDone, Data: ""},
3000 },
3001 },
3002 { // 173
3003 "[[ (a=b) ]]",
3004 []parser.Token{
3005 {Type: TokenKeyword, Data: "[["},
3006 {Type: TokenWhitespace, Data: " "},
3007 {Type: TokenPunctuator, Data: "("},
3008 {Type: TokenWord, Data: "a=b"},
3009 {Type: TokenPunctuator, Data: ")"},
3010 {Type: TokenWhitespace, Data: " "},
3011 {Type: TokenKeyword, Data: "]]"},
3012 {Type: parser.TokenDone, Data: ""},
3013 },
3014 },
3015 { // 174
3016 "[[ a < b ]]",
3017 []parser.Token{
3018 {Type: TokenKeyword, Data: "[["},
3019 {Type: TokenWhitespace, Data: " "},
3020 {Type: TokenWord, Data: "a"},
3021 {Type: TokenWhitespace, Data: " "},
3022 {Type: TokenBinaryOperator, Data: "<"},
3023 {Type: TokenWhitespace, Data: " "},
3024 {Type: TokenPattern, Data: "b"},
3025 {Type: TokenWhitespace, Data: " "},
3026 {Type: TokenKeyword, Data: "]]"},
3027 {Type: parser.TokenDone, Data: ""},
3028 },
3029 },
3030 { // 175
3031 "[[ a<b ]]",
3032 []parser.Token{
3033 {Type: TokenKeyword, Data: "[["},
3034 {Type: TokenWhitespace, Data: " "},
3035 {Type: TokenWord, Data: "a"},
3036 {Type: TokenBinaryOperator, Data: "<"},
3037 {Type: TokenPattern, Data: "b"},
3038 {Type: TokenWhitespace, Data: " "},
3039 {Type: TokenKeyword, Data: "]]"},
3040 {Type: parser.TokenDone, Data: ""},
3041 },
3042 },
3043 { // 176
3044 "[[ (a = b) ]]",
3045 []parser.Token{
3046 {Type: TokenKeyword, Data: "[["},
3047 {Type: TokenWhitespace, Data: " "},
3048 {Type: TokenPunctuator, Data: "("},
3049 {Type: TokenWord, Data: "a"},
3050 {Type: TokenWhitespace, Data: " "},
3051 {Type: TokenBinaryOperator, Data: "="},
3052 {Type: TokenWhitespace, Data: " "},
3053 {Type: TokenPattern, Data: "b"},
3054 {Type: TokenPunctuator, Data: ")"},
3055 {Type: TokenWhitespace, Data: " "},
3056 {Type: TokenKeyword, Data: "]]"},
3057 {Type: parser.TokenDone, Data: ""},
3058 },
3059 },
3060 { // 177
3061 "[[ (a -gt b) ]]",
3062 []parser.Token{
3063 {Type: TokenKeyword, Data: "[["},
3064 {Type: TokenWhitespace, Data: " "},
3065 {Type: TokenPunctuator, Data: "("},
3066 {Type: TokenWord, Data: "a"},
3067 {Type: TokenWhitespace, Data: " "},
3068 {Type: TokenKeyword, Data: "-gt"},
3069 {Type: TokenWhitespace, Data: " "},
3070 {Type: TokenWord, Data: "b"},
3071 {Type: TokenPunctuator, Data: ")"},
3072 {Type: TokenWhitespace, Data: " "},
3073 {Type: TokenKeyword, Data: "]]"},
3074 {Type: parser.TokenDone, Data: ""},
3075 },
3076 },
3077 { // 178
3078 "[[\na\n=\nb\n]]",
3079 []parser.Token{
3080 {Type: TokenKeyword, Data: "[["},
3081 {Type: TokenLineTerminator, Data: "\n"},
3082 {Type: TokenWord, Data: "a"},
3083 {Type: TokenLineTerminator, Data: "\n"},
3084 {Type: TokenBinaryOperator, Data: "="},
3085 {Type: TokenLineTerminator, Data: "\n"},
3086 {Type: TokenPattern, Data: "b"},
3087 {Type: TokenLineTerminator, Data: "\n"},
3088 {Type: TokenKeyword, Data: "]]"},
3089 {Type: parser.TokenDone, Data: ""},
3090 },
3091 },
3092 { // 179
3093 "[[\n(a=b)\n]]",
3094 []parser.Token{
3095 {Type: TokenKeyword, Data: "[["},
3096 {Type: TokenLineTerminator, Data: "\n"},
3097 {Type: TokenPunctuator, Data: "("},
3098 {Type: TokenWord, Data: "a=b"},
3099 {Type: TokenPunctuator, Data: ")"},
3100 {Type: TokenLineTerminator, Data: "\n"},
3101 {Type: TokenKeyword, Data: "]]"},
3102 {Type: parser.TokenDone, Data: ""},
3103 },
3104 },
3105 { // 180
3106 "[[ ",
3107 []parser.Token{
3108 {Type: TokenKeyword, Data: "[["},
3109 {Type: TokenWhitespace, Data: " "},
3110 {Type: parser.TokenError, Data: "unexpected EOF"},
3111 },
3112 },
3113 { // 181
3114 "[[ | = b ]]",
3115 []parser.Token{
3116 {Type: TokenKeyword, Data: "[["},
3117 {Type: TokenWhitespace, Data: " "},
3118 {Type: parser.TokenError, Data: "invalid character"},
3119 },
3120 },
3121 { // 182
3122 "[[ & = b ]]",
3123 []parser.Token{
3124 {Type: TokenKeyword, Data: "[["},
3125 {Type: TokenWhitespace, Data: " "},
3126 {Type: parser.TokenError, Data: "invalid character"},
3127 },
3128 },
3129 { // 183
3130 "[[ \"a\" = b ]]",
3131 []parser.Token{
3132 {Type: TokenKeyword, Data: "[["},
3133 {Type: TokenWhitespace, Data: " "},
3134 {Type: TokenString, Data: "\"a\""},
3135 {Type: TokenWhitespace, Data: " "},
3136 {Type: TokenBinaryOperator, Data: "="},
3137 {Type: TokenWhitespace, Data: " "},
3138 {Type: TokenPattern, Data: "b"},
3139 {Type: TokenWhitespace, Data: " "},
3140 {Type: TokenKeyword, Data: "]]"},
3141 {Type: parser.TokenDone, Data: ""},
3142 },
3143 },
3144 { // 184
3145 "[[ ]]a = ]]b ]]",
3146 []parser.Token{
3147 {Type: TokenKeyword, Data: "[["},
3148 {Type: TokenWhitespace, Data: " "},
3149 {Type: TokenWord, Data: "]]a"},
3150 {Type: TokenWhitespace, Data: " "},
3151 {Type: TokenBinaryOperator, Data: "="},
3152 {Type: TokenWhitespace, Data: " "},
3153 {Type: TokenPattern, Data: "]]b"},
3154 {Type: TokenWhitespace, Data: " "},
3155 {Type: TokenKeyword, Data: "]]"},
3156 {Type: parser.TokenDone, Data: ""},
3157 },
3158 },
3159 { // 185
3160 "[[ ( a = ]]b ) ]]",
3161 []parser.Token{
3162 {Type: TokenKeyword, Data: "[["},
3163 {Type: TokenWhitespace, Data: " "},
3164 {Type: TokenPunctuator, Data: "("},
3165 {Type: TokenWhitespace, Data: " "},
3166 {Type: TokenWord, Data: "a"},
3167 {Type: TokenWhitespace, Data: " "},
3168 {Type: TokenBinaryOperator, Data: "="},
3169 {Type: TokenWhitespace, Data: " "},
3170 {Type: TokenPattern, Data: "]]b"},
3171 {Type: TokenWhitespace, Data: " "},
3172 {Type: TokenPunctuator, Data: ")"},
3173 {Type: TokenWhitespace, Data: " "},
3174 {Type: TokenKeyword, Data: "]]"},
3175 {Type: parser.TokenDone, Data: ""},
3176 },
3177 },
3178 { // 186
3179 "[[ ) = ) ]]",
3180 []parser.Token{
3181 {Type: TokenKeyword, Data: "[["},
3182 {Type: TokenWhitespace, Data: " "},
3183 {Type: parser.TokenError, Data: "invalid character"},
3184 },
3185 },
3186 { // 187
3187 "[[ ( ]] ) ]]",
3188 []parser.Token{
3189 {Type: TokenKeyword, Data: "[["},
3190 {Type: TokenWhitespace, Data: " "},
3191 {Type: TokenPunctuator, Data: "("},
3192 {Type: TokenWhitespace, Data: " "},
3193 {Type: parser.TokenError, Data: "invalid character"},
3194 },
3195 },
3196 { // 188
3197 "[[ a \n= b ]]",
3198 []parser.Token{
3199 {Type: TokenKeyword, Data: "[["},
3200 {Type: TokenWhitespace, Data: " "},
3201 {Type: TokenWord, Data: "a"},
3202 {Type: TokenWhitespace, Data: " "},
3203 {Type: TokenLineTerminator, Data: "\n"},
3204 {Type: TokenBinaryOperator, Data: "="},
3205 {Type: TokenWhitespace, Data: " "},
3206 {Type: TokenPattern, Data: "b"},
3207 {Type: TokenWhitespace, Data: " "},
3208 {Type: TokenKeyword, Data: "]]"},
3209 {Type: parser.TokenDone, Data: ""},
3210 },
3211 },
3212 { // 189
3213 "[[ a\n = b ]]",
3214 []parser.Token{
3215 {Type: TokenKeyword, Data: "[["},
3216 {Type: TokenWhitespace, Data: " "},
3217 {Type: TokenWord, Data: "a"},
3218 {Type: TokenLineTerminator, Data: "\n"},
3219 {Type: TokenWhitespace, Data: " "},
3220 {Type: TokenBinaryOperator, Data: "="},
3221 {Type: TokenWhitespace, Data: " "},
3222 {Type: TokenPattern, Data: "b"},
3223 {Type: TokenWhitespace, Data: " "},
3224 {Type: TokenKeyword, Data: "]]"},
3225 {Type: parser.TokenDone, Data: ""},
3226 },
3227 },
3228 { // 190
3229 "[[ a ",
3230 []parser.Token{
3231 {Type: TokenKeyword, Data: "[["},
3232 {Type: TokenWhitespace, Data: " "},
3233 {Type: TokenWord, Data: "a"},
3234 {Type: TokenWhitespace, Data: " "},
3235 {Type: parser.TokenError, Data: "unexpected EOF"},
3236 },
3237 },
3238 { // 191
3239 "[[ a ! b ]]",
3240 []parser.Token{
3241 {Type: TokenKeyword, Data: "[["},
3242 {Type: TokenWhitespace, Data: " "},
3243 {Type: TokenWord, Data: "a"},
3244 {Type: TokenWhitespace, Data: " "},
3245 {Type: parser.TokenError, Data: "invalid character"},
3246 },
3247 },
3248 { // 192
3249 "[[ a -ez b ]]",
3250 []parser.Token{
3251 {Type: TokenKeyword, Data: "[["},
3252 {Type: TokenWhitespace, Data: " "},
3253 {Type: TokenWord, Data: "a"},
3254 {Type: TokenWhitespace, Data: " "},
3255 {Type: parser.TokenError, Data: "invalid character"},
3256 },
3257 },
3258 { // 193
3259 "[[ a -nz b ]]",
3260 []parser.Token{
3261 {Type: TokenKeyword, Data: "[["},
3262 {Type: TokenWhitespace, Data: " "},
3263 {Type: TokenWord, Data: "a"},
3264 {Type: TokenWhitespace, Data: " "},
3265 {Type: parser.TokenError, Data: "invalid character"},
3266 },
3267 },
3268 { // 194
3269 "[[ a -gz b ]]",
3270 []parser.Token{
3271 {Type: TokenKeyword, Data: "[["},
3272 {Type: TokenWhitespace, Data: " "},
3273 {Type: TokenWord, Data: "a"},
3274 {Type: TokenWhitespace, Data: " "},
3275 {Type: parser.TokenError, Data: "invalid character"},
3276 },
3277 },
3278 { // 195
3279 "[[ a -lz b ]]",
3280 []parser.Token{
3281 {Type: TokenKeyword, Data: "[["},
3282 {Type: TokenWhitespace, Data: " "},
3283 {Type: TokenWord, Data: "a"},
3284 {Type: TokenWhitespace, Data: " "},
3285 {Type: parser.TokenError, Data: "invalid character"},
3286 },
3287 },
3288 { // 196
3289 "[[ a -oz b ]]",
3290 []parser.Token{
3291 {Type: TokenKeyword, Data: "[["},
3292 {Type: TokenWhitespace, Data: " "},
3293 {Type: TokenWord, Data: "a"},
3294 {Type: TokenWhitespace, Data: " "},
3295 {Type: parser.TokenError, Data: "invalid character"},
3296 },
3297 },
3298 { // 197
3299 "[[ a -z b ]]",
3300 []parser.Token{
3301 {Type: TokenKeyword, Data: "[["},
3302 {Type: TokenWhitespace, Data: " "},
3303 {Type: TokenWord, Data: "a"},
3304 {Type: TokenWhitespace, Data: " "},
3305 {Type: parser.TokenError, Data: "invalid character"},
3306 },
3307 },
3308 { // 198
3309 "[[ -z < ]]",
3310 []parser.Token{
3311 {Type: TokenKeyword, Data: "[["},
3312 {Type: TokenWhitespace, Data: " "},
3313 {Type: TokenKeyword, Data: "-z"},
3314 {Type: TokenWhitespace, Data: " "},
3315 {Type: parser.TokenError, Data: "invalid character"},
3316 },
3317 },
3318 { // 199
3319 "[[ -z \n a ]]",
3320 []parser.Token{
3321 {Type: TokenKeyword, Data: "[["},
3322 {Type: TokenWhitespace, Data: " "},
3323 {Type: TokenKeyword, Data: "-z"},
3324 {Type: TokenWhitespace, Data: " "},
3325 {Type: TokenLineTerminator, Data: "\n"},
3326 {Type: TokenWhitespace, Data: " "},
3327 {Type: TokenWord, Data: "a"},
3328 {Type: TokenWhitespace, Data: " "},
3329 {Type: TokenKeyword, Data: "]]"},
3330 {Type: parser.TokenDone, Data: ""},
3331 },
3332 },
3333 { // 200
3334 "[[ a = b\\nc ]]",
3335 []parser.Token{
3336 {Type: TokenKeyword, Data: "[["},
3337 {Type: TokenWhitespace, Data: " "},
3338 {Type: TokenWord, Data: "a"},
3339 {Type: TokenWhitespace, Data: " "},
3340 {Type: TokenBinaryOperator, Data: "="},
3341 {Type: TokenWhitespace, Data: " "},
3342 {Type: TokenPattern, Data: "b\\nc"},
3343 {Type: TokenWhitespace, Data: " "},
3344 {Type: TokenKeyword, Data: "]]"},
3345 {Type: parser.TokenDone, Data: ""},
3346 },
3347 },
3348 { // 201
3349 "[[ a = b",
3350 []parser.Token{
3351 {Type: TokenKeyword, Data: "[["},
3352 {Type: TokenWhitespace, Data: " "},
3353 {Type: TokenWord, Data: "a"},
3354 {Type: TokenWhitespace, Data: " "},
3355 {Type: TokenBinaryOperator, Data: "="},
3356 {Type: TokenWhitespace, Data: " "},
3357 {Type: parser.TokenError, Data: "unexpected EOF"},
3358 },
3359 },
3360 { // 202
3361 "[[ -z `a` ]]",
3362 []parser.Token{
3363 {Type: TokenKeyword, Data: "[["},
3364 {Type: TokenWhitespace, Data: " "},
3365 {Type: TokenKeyword, Data: "-z"},
3366 {Type: TokenWhitespace, Data: " "},
3367 {Type: TokenOpenBacktick, Data: "`"},
3368 {Type: TokenWord, Data: "a"},
3369 {Type: TokenCloseBacktick, Data: "`"},
3370 {Type: TokenWhitespace, Data: " "},
3371 {Type: TokenKeyword, Data: "]]"},
3372 {Type: parser.TokenDone, Data: ""},
3373 },
3374 },
3375 { // 203
3376 "[[ -z | ]]",
3377 []parser.Token{
3378 {Type: TokenKeyword, Data: "[["},
3379 {Type: TokenWhitespace, Data: " "},
3380 {Type: TokenKeyword, Data: "-z"},
3381 {Type: TokenWhitespace, Data: " "},
3382 {Type: parser.TokenError, Data: "invalid character"},
3383 },
3384 },
3385 { // 204
3386 "[[ -z a",
3387 []parser.Token{
3388 {Type: TokenKeyword, Data: "[["},
3389 {Type: TokenWhitespace, Data: " "},
3390 {Type: TokenKeyword, Data: "-z"},
3391 {Type: TokenWhitespace, Data: " "},
3392 {Type: TokenWord, Data: "a"},
3393 {Type: parser.TokenError, Data: "unexpected EOF"},
3394 },
3395 },
3396 { // 205
3397 "[[ -z ",
3398 []parser.Token{
3399 {Type: TokenKeyword, Data: "[["},
3400 {Type: TokenWhitespace, Data: " "},
3401 {Type: TokenKeyword, Data: "-z"},
3402 {Type: TokenWhitespace, Data: " "},
3403 {Type: parser.TokenError, Data: "unexpected EOF"},
3404 },
3405 },
3406 { // 206
3407 "[[ -z #comment\na ]]",
3408 []parser.Token{
3409 {Type: TokenKeyword, Data: "[["},
3410 {Type: TokenWhitespace, Data: " "},
3411 {Type: TokenKeyword, Data: "-z"},
3412 {Type: TokenWhitespace, Data: " "},
3413 {Type: parser.TokenError, Data: "invalid character"},
3414 },
3415 },
3416 { // 207
3417 "[[ a -eq #comment\nb ]]",
3418 []parser.Token{
3419 {Type: TokenKeyword, Data: "[["},
3420 {Type: TokenWhitespace, Data: " "},
3421 {Type: TokenWord, Data: "a"},
3422 {Type: TokenWhitespace, Data: " "},
3423 {Type: TokenKeyword, Data: "-eq"},
3424 {Type: TokenWhitespace, Data: " "},
3425 {Type: parser.TokenError, Data: "invalid character"},
3426 },
3427 },
3428 { // 208
3429 "[[ #comment\na = b ]]",
3430 []parser.Token{
3431 {Type: TokenKeyword, Data: "[["},
3432 {Type: TokenWhitespace, Data: " "},
3433 {Type: TokenComment, Data: "#comment"},
3434 {Type: TokenLineTerminator, Data: "\n"},
3435 {Type: TokenWord, Data: "a"},
3436 {Type: TokenWhitespace, Data: " "},
3437 {Type: TokenBinaryOperator, Data: "="},
3438 {Type: TokenWhitespace, Data: " "},
3439 {Type: TokenPattern, Data: "b"},
3440 {Type: TokenWhitespace, Data: " "},
3441 {Type: TokenKeyword, Data: "]]"},
3442 {Type: parser.TokenDone, Data: ""},
3443 },
3444 },
3445 { // 209
3446 "[[ ( a = b ) #comment\n ]]",
3447 []parser.Token{
3448 {Type: TokenKeyword, Data: "[["},
3449 {Type: TokenWhitespace, Data: " "},
3450 {Type: TokenPunctuator, Data: "("},
3451 {Type: TokenWhitespace, Data: " "},
3452 {Type: TokenWord, Data: "a"},
3453 {Type: TokenWhitespace, Data: " "},
3454 {Type: TokenBinaryOperator, Data: "="},
3455 {Type: TokenWhitespace, Data: " "},
3456 {Type: TokenPattern, Data: "b"},
3457 {Type: TokenWhitespace, Data: " "},
3458 {Type: TokenPunctuator, Data: ")"},
3459 {Type: TokenWhitespace, Data: " "},
3460 {Type: TokenComment, Data: "#comment"},
3461 {Type: TokenLineTerminator, Data: "\n"},
3462 {Type: TokenWhitespace, Data: " "},
3463 {Type: TokenKeyword, Data: "]]"},
3464 {Type: parser.TokenDone, Data: ""},
3465 },
3466 },
3467 { // 210
3468 "[[ a = #comment\nb ]]",
3469 []parser.Token{
3470 {Type: TokenKeyword, Data: "[["},
3471 {Type: TokenWhitespace, Data: " "},
3472 {Type: TokenWord, Data: "a"},
3473 {Type: TokenWhitespace, Data: " "},
3474 {Type: TokenBinaryOperator, Data: "="},
3475 {Type: TokenWhitespace, Data: " "},
3476 {Type: parser.TokenError, Data: "invalid character"},
3477 },
3478 },
3479 { // 211
3480 "[[ a -net b ]]",
3481 []parser.Token{
3482 {Type: TokenKeyword, Data: "[["},
3483 {Type: TokenWhitespace, Data: " "},
3484 {Type: TokenWord, Data: "a"},
3485 {Type: TokenWhitespace, Data: " "},
3486 {Type: parser.TokenError, Data: "invalid operator"},
3487 },
3488 },
3489 { // 212
3490 "[[ -f file ]]\n[[ ! -e file\"str\" ]];[[ -S \"str\"a",
3491 []parser.Token{
3492 {Type: TokenKeyword, Data: "[["},
3493 {Type: TokenWhitespace, Data: " "},
3494 {Type: TokenKeyword, Data: "-f"},
3495 {Type: TokenWhitespace, Data: " "},
3496 {Type: TokenWord, Data: "file"},
3497 {Type: TokenWhitespace, Data: " "},
3498 {Type: TokenKeyword, Data: "]]"},
3499 {Type: TokenLineTerminator, Data: "\n"},
3500 {Type: TokenKeyword, Data: "[["},
3501 {Type: TokenWhitespace, Data: " "},
3502 {Type: TokenPunctuator, Data: "!"},
3503 {Type: TokenWhitespace, Data: " "},
3504 {Type: TokenKeyword, Data: "-e"},
3505 {Type: TokenWhitespace, Data: " "},
3506 {Type: TokenWord, Data: "file"},
3507 {Type: TokenString, Data: "\"str\""},
3508 {Type: TokenWhitespace, Data: " "},
3509 {Type: TokenKeyword, Data: "]]"},
3510 {Type: TokenPunctuator, Data: ";"},
3511 {Type: TokenKeyword, Data: "[["},
3512 {Type: TokenWhitespace, Data: " "},
3513 {Type: TokenKeyword, Data: "-S"},
3514 {Type: TokenWhitespace, Data: " "},
3515 {Type: TokenString, Data: "\"str\""},
3516 {Type: TokenWord, Data: "a"},
3517 {Type: parser.TokenError, Data: "unexpected EOF"},
3518 },
3519 },
3520 { // 213
3521 "\"",
3522 []parser.Token{
3523 {Type: parser.TokenError, Data: "unexpected EOF"},
3524 },
3525 },
3526 { // 214
3527 "$((",
3528 []parser.Token{
3529 {Type: TokenPunctuator, Data: "$(("},
3530 {Type: parser.TokenError, Data: "unexpected EOF"},
3531 },
3532 },
3533 { // 215
3534 "$(( \"1\" ))",
3535 []parser.Token{
3536 {Type: TokenPunctuator, Data: "$(("},
3537 {Type: TokenWhitespace, Data: " "},
3538 {Type: TokenString, Data: "\"1\""},
3539 {Type: TokenWhitespace, Data: " "},
3540 {Type: TokenPunctuator, Data: "))"},
3541 {Type: parser.TokenDone, Data: ""},
3542 },
3543 },
3544 { // 216
3545 "$(( : ))",
3546 []parser.Token{
3547 {Type: TokenPunctuator, Data: "$(("},
3548 {Type: TokenWhitespace, Data: " "},
3549 {Type: parser.TokenError, Data: "invalid character"},
3550 },
3551 },
3552 { // 217
3553 "$(( ; ))",
3554 []parser.Token{
3555 {Type: TokenPunctuator, Data: "$(("},
3556 {Type: TokenWhitespace, Data: " "},
3557 {Type: parser.TokenError, Data: "invalid character"},
3558 },
3559 },
3560 { // 218
3561 "`\\",
3562 []parser.Token{
3563 {Type: TokenOpenBacktick, Data: "`"},
3564 {Type: parser.TokenError, Data: "incorrect backtick depth"},
3565 },
3566 },
3567 { // 219
3568 "<<",
3569 []parser.Token{
3570 {Type: TokenPunctuator, Data: "<<"},
3571 {Type: parser.TokenError, Data: "unexpected EOF"},
3572 },
3573 },
3574 { // 220
3575 "<<a",
3576 []parser.Token{
3577 {Type: TokenPunctuator, Data: "<<"},
3578 {Type: parser.TokenError, Data: "unexpected EOF"},
3579 },
3580 },
3581 { // 221
3582 "<<a\\n\\tc\n123\na\n\tc",
3583 []parser.Token{
3584 {Type: TokenPunctuator, Data: "<<"},
3585 {Type: TokenWord, Data: "a\\n\\tc"},
3586 {Type: TokenLineTerminator, Data: "\n"},
3587 {Type: TokenHeredoc, Data: "123\n"},
3588 {Type: TokenHeredocEnd, Data: "a\n\tc"},
3589 {Type: parser.TokenDone, Data: ""},
3590 },
3591 },
3592 { // 222
3593 "<<abc\n123",
3594 []parser.Token{
3595 {Type: TokenPunctuator, Data: "<<"},
3596 {Type: TokenWord, Data: "abc"},
3597 {Type: TokenLineTerminator, Data: "\n"},
3598 {Type: parser.TokenError, Data: "unexpected EOF"},
3599 },
3600 },
3601 { // 223
3602 "<<abc\n123$\nabc",
3603 []parser.Token{
3604 {Type: TokenPunctuator, Data: "<<"},
3605 {Type: TokenWord, Data: "abc"},
3606 {Type: TokenLineTerminator, Data: "\n"},
3607 {Type: TokenHeredoc, Data: "123$\n"},
3608 {Type: TokenHeredocEnd, Data: "abc"},
3609 {Type: parser.TokenDone, Data: ""},
3610 },
3611 },
3612 { // 224
3613 "${a!}",
3614 []parser.Token{
3615 {Type: TokenPunctuator, Data: "${"},
3616 {Type: TokenIdentifier, Data: "a"},
3617 {Type: parser.TokenError, Data: "invalid parameter expansion"},
3618 },
3619 },
3620 { // 225
3621 "${a:b}",
3622 []parser.Token{
3623 {Type: TokenPunctuator, Data: "${"},
3624 {Type: TokenIdentifier, Data: "a"},
3625 {Type: TokenPunctuator, Data: ":"},
3626 {Type: TokenWord, Data: "b"},
3627 {Type: TokenPunctuator, Data: "}"},
3628 {Type: parser.TokenDone, Data: ""},
3629 },
3630 },
3631 { // 226
3632 "${a:1:b}",
3633 []parser.Token{
3634 {Type: TokenPunctuator, Data: "${"},
3635 {Type: TokenIdentifier, Data: "a"},
3636 {Type: TokenPunctuator, Data: ":"},
3637 {Type: TokenWord, Data: "1"},
3638 {Type: TokenPunctuator, Data: ":"},
3639 {Type: TokenWord, Data: "b"},
3640 {Type: TokenPunctuator, Data: "}"},
3641 {Type: parser.TokenDone, Data: ""},
3642 },
3643 },
3644 { // 227
3645 "${a/(}",
3646 []parser.Token{
3647 {Type: TokenPunctuator, Data: "${"},
3648 {Type: TokenIdentifier, Data: "a"},
3649 {Type: TokenPunctuator, Data: "/"},
3650 {Type: parser.TokenError, Data: "invalid character"},
3651 },
3652 },
3653 { // 228
3654 "${a/",
3655 []parser.Token{
3656 {Type: TokenPunctuator, Data: "${"},
3657 {Type: TokenIdentifier, Data: "a"},
3658 {Type: TokenPunctuator, Data: "/"},
3659 {Type: parser.TokenError, Data: "unexpected EOF"},
3660 },
3661 },
3662 { // 229
3663 "${a/)}",
3664 []parser.Token{
3665 {Type: TokenPunctuator, Data: "${"},
3666 {Type: TokenIdentifier, Data: "a"},
3667 {Type: TokenPunctuator, Data: "/"},
3668 {Type: parser.TokenError, Data: "invalid character"},
3669 },
3670 },
3671 { // 230
3672 "${a/b[\\t]+/c}",
3673 []parser.Token{
3674 {Type: TokenPunctuator, Data: "${"},
3675 {Type: TokenIdentifier, Data: "a"},
3676 {Type: TokenPunctuator, Data: "/"},
3677 {Type: TokenPattern, Data: "b[\\t]+"},
3678 {Type: TokenPunctuator, Data: "/"},
3679 {Type: TokenWord, Data: "c"},
3680 {Type: TokenPunctuator, Data: "}"},
3681 {Type: parser.TokenDone, Data: ""},
3682 },
3683 },
3684 { // 231
3685 "${a-b}",
3686 []parser.Token{
3687 {Type: TokenPunctuator, Data: "${"},
3688 {Type: TokenIdentifier, Data: "a"},
3689 {Type: TokenPunctuator, Data: "-"},
3690 {Type: TokenWord, Data: "b"},
3691 {Type: TokenPunctuator, Data: "}"},
3692 {Type: parser.TokenDone, Data: ""},
3693 },
3694 },
3695 { // 232
3696 "${a+b}",
3697 []parser.Token{
3698 {Type: TokenPunctuator, Data: "${"},
3699 {Type: TokenIdentifier, Data: "a"},
3700 {Type: TokenPunctuator, Data: "+"},
3701 {Type: TokenWord, Data: "b"},
3702 {Type: TokenPunctuator, Data: "}"},
3703 {Type: parser.TokenDone, Data: ""},
3704 },
3705 },
3706 { // 233
3707 "${a=b}",
3708 []parser.Token{
3709 {Type: TokenPunctuator, Data: "${"},
3710 {Type: TokenIdentifier, Data: "a"},
3711 {Type: TokenPunctuator, Data: "="},
3712 {Type: TokenWord, Data: "b"},
3713 {Type: TokenPunctuator, Data: "}"},
3714 {Type: parser.TokenDone, Data: ""},
3715 },
3716 },
3717 { // 234
3718 "${a?b}",
3719 []parser.Token{
3720 {Type: TokenPunctuator, Data: "${"},
3721 {Type: TokenIdentifier, Data: "a"},
3722 {Type: TokenPunctuator, Data: "?"},
3723 {Type: TokenWord, Data: "b"},
3724 {Type: TokenPunctuator, Data: "}"},
3725 {Type: parser.TokenDone, Data: ""},
3726 },
3727 },
3728 { // 235
3729 "$(( 0x\"1\" ))",
3730 []parser.Token{
3731 {Type: TokenPunctuator, Data: "$(("},
3732 {Type: TokenWhitespace, Data: " "},
3733 {Type: TokenWord, Data: "0x"},
3734 {Type: TokenString, Data: "\"1\""},
3735 {Type: TokenWhitespace, Data: " "},
3736 {Type: TokenPunctuator, Data: "))"},
3737 {Type: parser.TokenDone, Data: ""},
3738 },
3739 },
3740 { // 236
3741 "$(( 1#2 ))",
3742 []parser.Token{
3743 {Type: TokenPunctuator, Data: "$(("},
3744 {Type: TokenWhitespace, Data: " "},
3745 {Type: TokenNumberLiteral, Data: "1#2"},
3746 {Type: TokenWhitespace, Data: " "},
3747 {Type: TokenPunctuator, Data: "))"},
3748 {Type: parser.TokenDone, Data: ""},
3749 },
3750 },
3751 { // 237
3752 "$(( 1#$a ))",
3753 []parser.Token{
3754 {Type: TokenPunctuator, Data: "$(("},
3755 {Type: TokenWhitespace, Data: " "},
3756 {Type: TokenWord, Data: "1#"},
3757 {Type: TokenIdentifier, Data: "$a"},
3758 {Type: TokenWhitespace, Data: " "},
3759 {Type: TokenPunctuator, Data: "))"},
3760 {Type: parser.TokenDone, Data: ""},
3761 },
3762 },
3763 { // 238
3764 "$(( $a#2 ))",
3765 []parser.Token{
3766 {Type: TokenPunctuator, Data: "$(("},
3767 {Type: TokenWhitespace, Data: " "},
3768 {Type: TokenIdentifier, Data: "$a#2"},
3769 {Type: TokenWhitespace, Data: " "},
3770 {Type: TokenPunctuator, Data: "))"},
3771 {Type: parser.TokenDone, Data: ""},
3772 },
3773 },
3774 { // 239
3775 "function a time",
3776 []parser.Token{
3777 {Type: TokenKeyword, Data: "function"},
3778 {Type: TokenWhitespace, Data: " "},
3779 {Type: TokenFunctionIdentifier, Data: "a"},
3780 {Type: TokenWhitespace, Data: " "},
3781 {Type: parser.TokenError, Data: "invalid keyword"},
3782 },
3783 },
3784 { // 240
3785 "then",
3786 []parser.Token{
3787 {Type: parser.TokenError, Data: "invalid keyword"},
3788 },
3789 },
3790 { // 241
3791 "in",
3792 []parser.Token{
3793 {Type: parser.TokenError, Data: "invalid keyword"},
3794 },
3795 },
3796 { // 242
3797 "do",
3798 []parser.Token{
3799 {Type: parser.TokenError, Data: "invalid keyword"},
3800 },
3801 },
3802 { // 243
3803 "elif",
3804 []parser.Token{
3805 {Type: parser.TokenError, Data: "invalid keyword"},
3806 },
3807 },
3808 { // 244
3809 "else",
3810 []parser.Token{
3811 {Type: parser.TokenError, Data: "invalid keyword"},
3812 },
3813 },
3814 { // 245
3815 "fi",
3816 []parser.Token{
3817 {Type: parser.TokenError, Data: "invalid keyword"},
3818 },
3819 },
3820 { // 246
3821 "done",
3822 []parser.Token{
3823 {Type: parser.TokenError, Data: "invalid keyword"},
3824 },
3825 },
3826 { // 247
3827 "esac",
3828 []parser.Token{
3829 {Type: parser.TokenError, Data: "invalid keyword"},
3830 },
3831 },
3832 { // 248
3833 "function a coproc",
3834 []parser.Token{
3835 {Type: TokenKeyword, Data: "function"},
3836 {Type: TokenWhitespace, Data: " "},
3837 {Type: TokenFunctionIdentifier, Data: "a"},
3838 {Type: TokenWhitespace, Data: " "},
3839 {Type: parser.TokenError, Data: "invalid keyword"},
3840 },
3841 },
3842 { // 249
3843 "function a function",
3844 []parser.Token{
3845 {Type: TokenKeyword, Data: "function"},
3846 {Type: TokenWhitespace, Data: " "},
3847 {Type: TokenFunctionIdentifier, Data: "a"},
3848 {Type: TokenWhitespace, Data: " "},
3849 {Type: parser.TokenError, Data: "invalid keyword"},
3850 },
3851 },
3852 { // 250
3853 "function a(\n) { echo b; }",
3854 []parser.Token{
3855 {Type: TokenKeyword, Data: "function"},
3856 {Type: TokenWhitespace, Data: " "},
3857 {Type: TokenFunctionIdentifier, Data: "a"},
3858 {Type: TokenPunctuator, Data: "("},
3859 {Type: parser.TokenError, Data: "missing closing paren"},
3860 },
3861 },
3862 { // 251
3863 "function (\n) { echo b; }",
3864 []parser.Token{
3865 {Type: TokenKeyword, Data: "function"},
3866 {Type: TokenWhitespace, Data: " "},
3867 {Type: parser.TokenError, Data: "invalid identifier"},
3868 },
3869 },
3870 { // 252
3871 "function a()",
3872 []parser.Token{
3873 {Type: TokenKeyword, Data: "function"},
3874 {Type: TokenWhitespace, Data: " "},
3875 {Type: TokenFunctionIdentifier, Data: "a"},
3876 {Type: TokenPunctuator, Data: "("},
3877 {Type: TokenPunctuator, Data: ")"},
3878 {Type: parser.TokenError, Data: "unexpected EOF"},
3879 },
3880 },
3881 { // 253
3882 "select %; do b; done",
3883 []parser.Token{
3884 {Type: TokenKeyword, Data: "select"},
3885 {Type: TokenWhitespace, Data: " "},
3886 {Type: parser.TokenError, Data: "invalid identifier"},
3887 },
3888 },
3889 { // 254
3890 "declare a=b",
3891 []parser.Token{
3892 {Type: TokenBuiltin, Data: "declare"},
3893 {Type: TokenWhitespace, Data: " "},
3894 {Type: TokenIdentifierAssign, Data: "a"},
3895 {Type: TokenAssignment, Data: "="},
3896 {Type: TokenWord, Data: "b"},
3897 {Type: parser.TokenDone, Data: ""},
3898 },
3899 },
3900 { // 255
3901 "local -a a=b c=d",
3902 []parser.Token{
3903 {Type: TokenBuiltin, Data: "local"},
3904 {Type: TokenWhitespace, Data: " "},
3905 {Type: TokenOperator, Data: "-a"},
3906 {Type: TokenWhitespace, Data: " "},
3907 {Type: TokenIdentifierAssign, Data: "a"},
3908 {Type: TokenAssignment, Data: "="},
3909 {Type: TokenWord, Data: "b"},
3910 {Type: TokenWhitespace, Data: " "},
3911 {Type: TokenIdentifierAssign, Data: "c"},
3912 {Type: TokenAssignment, Data: "="},
3913 {Type: TokenWord, Data: "d"},
3914 {Type: parser.TokenDone, Data: ""},
3915 },
3916 },
3917 { // 256
3918 "typeset -aFx a=b",
3919 []parser.Token{
3920 {Type: TokenBuiltin, Data: "typeset"},
3921 {Type: TokenWhitespace, Data: " "},
3922 {Type: TokenOperator, Data: "-aFx"},
3923 {Type: TokenWhitespace, Data: " "},
3924 {Type: TokenIdentifierAssign, Data: "a"},
3925 {Type: TokenAssignment, Data: "="},
3926 {Type: TokenWord, Data: "b"},
3927 {Type: parser.TokenDone, Data: ""},
3928 },
3929 },
3930 { // 257
3931 "readonly -A -p -f a=b",
3932 []parser.Token{
3933 {Type: TokenBuiltin, Data: "readonly"},
3934 {Type: TokenWhitespace, Data: " "},
3935 {Type: TokenOperator, Data: "-A"},
3936 {Type: TokenWhitespace, Data: " "},
3937 {Type: TokenOperator, Data: "-p"},
3938 {Type: TokenWhitespace, Data: " "},
3939 {Type: TokenOperator, Data: "-f"},
3940 {Type: TokenWhitespace, Data: " "},
3941 {Type: TokenIdentifierAssign, Data: "a"},
3942 {Type: TokenAssignment, Data: "="},
3943 {Type: TokenWord, Data: "b"},
3944 {Type: parser.TokenDone, Data: ""},
3945 },
3946 },
3947 { // 258
3948 "export -n -1 a",
3949 []parser.Token{
3950 {Type: TokenBuiltin, Data: "export"},
3951 {Type: TokenWhitespace, Data: " "},
3952 {Type: TokenOperator, Data: "-n"},
3953 {Type: TokenWhitespace, Data: " "},
3954 {Type: parser.TokenError, Data: "invalid character"},
3955 },
3956 },
3957 { // 259
3958 "let a=1",
3959 []parser.Token{
3960 {Type: TokenBuiltin, Data: "let"},
3961 {Type: TokenWhitespace, Data: " "},
3962 {Type: TokenLetIdentifierAssign, Data: "a"},
3963 {Type: TokenAssignment, Data: "="},
3964 {Type: TokenNumberLiteral, Data: "1"},
3965 {Type: parser.TokenDone, Data: ""},
3966 },
3967 },
3968 { // 260
3969 "let a=(1)",
3970 []parser.Token{
3971 {Type: TokenBuiltin, Data: "let"},
3972 {Type: TokenWhitespace, Data: " "},
3973 {Type: TokenLetIdentifierAssign, Data: "a"},
3974 {Type: TokenAssignment, Data: "="},
3975 {Type: TokenPunctuator, Data: "("},
3976 {Type: TokenNumberLiteral, Data: "1"},
3977 {Type: TokenPunctuator, Data: ")"},
3978 {Type: parser.TokenDone, Data: ""},
3979 },
3980 },
3981 { // 261
3982 "let a=( 1 );",
3983 []parser.Token{
3984 {Type: TokenBuiltin, Data: "let"},
3985 {Type: TokenWhitespace, Data: " "},
3986 {Type: TokenLetIdentifierAssign, Data: "a"},
3987 {Type: TokenAssignment, Data: "="},
3988 {Type: TokenPunctuator, Data: "("},
3989 {Type: TokenWhitespace, Data: " "},
3990 {Type: TokenNumberLiteral, Data: "1"},
3991 {Type: TokenWhitespace, Data: " "},
3992 {Type: TokenPunctuator, Data: ")"},
3993 {Type: TokenPunctuator, Data: ";"},
3994 {Type: parser.TokenDone, Data: ""},
3995 },
3996 },
3997 { // 262
3998 "let a=1+2 b=2*3",
3999 []parser.Token{
4000 {Type: TokenBuiltin, Data: "let"},
4001 {Type: TokenWhitespace, Data: " "},
4002 {Type: TokenLetIdentifierAssign, Data: "a"},
4003 {Type: TokenAssignment, Data: "="},
4004 {Type: TokenNumberLiteral, Data: "1"},
4005 {Type: TokenPunctuator, Data: "+"},
4006 {Type: TokenNumberLiteral, Data: "2"},
4007 {Type: TokenWhitespace, Data: " "},
4008 {Type: TokenLetIdentifierAssign, Data: "b"},
4009 {Type: TokenAssignment, Data: "="},
4010 {Type: TokenNumberLiteral, Data: "2"},
4011 {Type: TokenPunctuator, Data: "*"},
4012 {Type: TokenNumberLiteral, Data: "3"},
4013 {Type: parser.TokenDone, Data: ""},
4014 },
4015 },
4016 { // 263
4017 "let a=b?c:d",
4018 []parser.Token{
4019 {Type: TokenBuiltin, Data: "let"},
4020 {Type: TokenWhitespace, Data: " "},
4021 {Type: TokenLetIdentifierAssign, Data: "a"},
4022 {Type: TokenAssignment, Data: "="},
4023 {Type: TokenWord, Data: "b"},
4024 {Type: TokenPunctuator, Data: "?"},
4025 {Type: TokenWord, Data: "c"},
4026 {Type: TokenPunctuator, Data: ":"},
4027 {Type: TokenWord, Data: "d"},
4028 {Type: parser.TokenDone, Data: ""},
4029 },
4030 },
4031 { // 264
4032 "let a=b ? c : d",
4033 []parser.Token{
4034 {Type: TokenBuiltin, Data: "let"},
4035 {Type: TokenWhitespace, Data: " "},
4036 {Type: TokenLetIdentifierAssign, Data: "a"},
4037 {Type: TokenAssignment, Data: "="},
4038 {Type: TokenWord, Data: "b"},
4039 {Type: TokenWhitespace, Data: " "},
4040 {Type: TokenWord, Data: "?"},
4041 {Type: TokenWhitespace, Data: " "},
4042 {Type: TokenWord, Data: "c"},
4043 {Type: TokenWhitespace, Data: " "},
4044 {Type: TokenWord, Data: ":"},
4045 {Type: TokenWhitespace, Data: " "},
4046 {Type: TokenWord, Data: "d"},
4047 {Type: parser.TokenDone, Data: ""},
4048 },
4049 },
4050 { // 265
4051 "let a=( b ? c : d )",
4052 []parser.Token{
4053 {Type: TokenBuiltin, Data: "let"},
4054 {Type: TokenWhitespace, Data: " "},
4055 {Type: TokenLetIdentifierAssign, Data: "a"},
4056 {Type: TokenAssignment, Data: "="},
4057 {Type: TokenPunctuator, Data: "("},
4058 {Type: TokenWhitespace, Data: " "},
4059 {Type: TokenWord, Data: "b"},
4060 {Type: TokenWhitespace, Data: " "},
4061 {Type: TokenPunctuator, Data: "?"},
4062 {Type: TokenWhitespace, Data: " "},
4063 {Type: TokenWord, Data: "c"},
4064 {Type: TokenWhitespace, Data: " "},
4065 {Type: TokenPunctuator, Data: ":"},
4066 {Type: TokenWhitespace, Data: " "},
4067 {Type: TokenWord, Data: "d"},
4068 {Type: TokenWhitespace, Data: " "},
4069 {Type: TokenPunctuator, Data: ")"},
4070 {Type: parser.TokenDone, Data: ""},
4071 },
4072 },
4073 { // 266
4074 "let a={b",
4075 []parser.Token{
4076 {Type: TokenBuiltin, Data: "let"},
4077 {Type: TokenWhitespace, Data: " "},
4078 {Type: TokenLetIdentifierAssign, Data: "a"},
4079 {Type: TokenAssignment, Data: "="},
4080 {Type: parser.TokenError, Data: "invalid character"},
4081 },
4082 },
4083 { // 267
4084 "let a={b..c}",
4085 []parser.Token{
4086 {Type: TokenBuiltin, Data: "let"},
4087 {Type: TokenWhitespace, Data: " "},
4088 {Type: TokenLetIdentifierAssign, Data: "a"},
4089 {Type: TokenAssignment, Data: "="},
4090 {Type: TokenBraceSequenceExpansion, Data: "{"},
4091 {Type: TokenWord, Data: "b"},
4092 {Type: TokenPunctuator, Data: ".."},
4093 {Type: TokenWord, Data: "c"},
4094 {Type: TokenBraceExpansion, Data: "}"},
4095 {Type: parser.TokenDone, Data: ""},
4096 },
4097 },
4098 { // 268
4099 "let a=(b + c{d..e})",
4100 []parser.Token{
4101 {Type: TokenBuiltin, Data: "let"},
4102 {Type: TokenWhitespace, Data: " "},
4103 {Type: TokenLetIdentifierAssign, Data: "a"},
4104 {Type: TokenAssignment, Data: "="},
4105 {Type: TokenPunctuator, Data: "("},
4106 {Type: TokenWord, Data: "b"},
4107 {Type: TokenWhitespace, Data: " "},
4108 {Type: TokenPunctuator, Data: "+"},
4109 {Type: TokenWhitespace, Data: " "},
4110 {Type: TokenWord, Data: "c"},
4111 {Type: TokenBraceSequenceExpansion, Data: "{"},
4112 {Type: TokenWord, Data: "d"},
4113 {Type: TokenPunctuator, Data: ".."},
4114 {Type: TokenWord, Data: "e"},
4115 {Type: TokenBraceExpansion, Data: "}"},
4116 {Type: TokenPunctuator, Data: ")"},
4117 {Type: parser.TokenDone, Data: ""},
4118 },
4119 },
4120 { // 269
4121 "let \"a++\"",
4122 []parser.Token{
4123 {Type: TokenBuiltin, Data: "let"},
4124 {Type: TokenWhitespace, Data: " "},
4125 {Type: TokenString, Data: "\"a++\""},
4126 {Type: parser.TokenDone, Data: ""},
4127 },
4128 },
4129 { // 270
4130 "a=()",
4131 []parser.Token{
4132 {Type: TokenIdentifierAssign, Data: "a"},
4133 {Type: TokenAssignment, Data: "="},
4134 {Type: TokenPunctuator, Data: "("},
4135 {Type: TokenPunctuator, Data: ")"},
4136 {Type: parser.TokenDone, Data: ""},
4137 },
4138 },
4139 { // 271
4140 "a=(b c)",
4141 []parser.Token{
4142 {Type: TokenIdentifierAssign, Data: "a"},
4143 {Type: TokenAssignment, Data: "="},
4144 {Type: TokenPunctuator, Data: "("},
4145 {Type: TokenWord, Data: "b"},
4146 {Type: TokenWhitespace, Data: " "},
4147 {Type: TokenWord, Data: "c"},
4148 {Type: TokenPunctuator, Data: ")"},
4149 {Type: parser.TokenDone, Data: ""},
4150 },
4151 },
4152 { // 272
4153 "a=([b]=c [d]=e)",
4154 []parser.Token{
4155 {Type: TokenIdentifierAssign, Data: "a"},
4156 {Type: TokenAssignment, Data: "="},
4157 {Type: TokenPunctuator, Data: "("},
4158 {Type: TokenWord, Data: "[b]=c"},
4159 {Type: TokenWhitespace, Data: " "},
4160 {Type: TokenWord, Data: "[d]=e"},
4161 {Type: TokenPunctuator, Data: ")"},
4162 {Type: parser.TokenDone, Data: ""},
4163 },
4164 },
4165 { // 273
4166 "a[ b]=1 c",
4167 []parser.Token{
4168 {Type: TokenIdentifierAssign, Data: "a"},
4169 {Type: TokenPunctuator, Data: "["},
4170 {Type: TokenWhitespace, Data: " "},
4171 {Type: TokenWord, Data: "b"},
4172 {Type: TokenPunctuator, Data: "]"},
4173 {Type: TokenAssignment, Data: "="},
4174 {Type: TokenWord, Data: "1"},
4175 {Type: TokenWhitespace, Data: " "},
4176 {Type: TokenWord, Data: "c"},
4177 {Type: parser.TokenDone, Data: ""},
4178 },
4179 },
4180 { // 274
4181 "a b[ c]=1",
4182 []parser.Token{
4183 {Type: TokenWord, Data: "a"},
4184 {Type: TokenWhitespace, Data: " "},
4185 {Type: TokenWord, Data: "b["},
4186 {Type: TokenWhitespace, Data: " "},
4187 {Type: TokenWord, Data: "c]=1"},
4188 {Type: parser.TokenDone, Data: ""},
4189 },
4190 },
4191 { // 275
4192 "( #comment\n)",
4193 []parser.Token{
4194 {Type: TokenPunctuator, Data: "("},
4195 {Type: TokenWhitespace, Data: " "},
4196 {Type: TokenComment, Data: "#comment"},
4197 {Type: TokenLineTerminator, Data: "\n"},
4198 {Type: TokenPunctuator, Data: ")"},
4199 {Type: parser.TokenDone, Data: ""},
4200 },
4201 },
4202 { // 276
4203 "{ #comment\n}",
4204 []parser.Token{
4205 {Type: TokenPunctuator, Data: "{"},
4206 {Type: TokenWhitespace, Data: " "},
4207 {Type: TokenComment, Data: "#comment"},
4208 {Type: TokenLineTerminator, Data: "\n"},
4209 {Type: TokenPunctuator, Data: "}"},
4210 {Type: parser.TokenDone, Data: ""},
4211 },
4212 },
4213 { // 277
4214 "(( #comment\n))",
4215 []parser.Token{
4216 {Type: TokenPunctuator, Data: "(("},
4217 {Type: TokenWhitespace, Data: " "},
4218 {Type: parser.TokenError, Data: "invalid character"},
4219 },
4220 },
4221 { // 278
4222 "a[",
4223 []parser.Token{
4224 {Type: TokenIdentifierAssign, Data: "a"},
4225 {Type: TokenPunctuator, Data: "["},
4226 {Type: parser.TokenError, Data: "unexpected EOF"},
4227 },
4228 },
4229 { // 279
4230 "a[b]c[",
4231 []parser.Token{
4232 {Type: TokenWord, Data: "a"},
4233 {Type: TokenPunctuator, Data: "["},
4234 {Type: TokenWord, Data: "b"},
4235 {Type: TokenPunctuator, Data: "]"},
4236 {Type: TokenWord, Data: "c["},
4237 {Type: parser.TokenDone, Data: ""},
4238 },
4239 },
4240 { // 280
4241 "a[b]=c d[",
4242 []parser.Token{
4243 {Type: TokenIdentifierAssign, Data: "a"},
4244 {Type: TokenPunctuator, Data: "["},
4245 {Type: TokenWord, Data: "b"},
4246 {Type: TokenPunctuator, Data: "]"},
4247 {Type: TokenAssignment, Data: "="},
4248 {Type: TokenWord, Data: "c"},
4249 {Type: TokenWhitespace, Data: " "},
4250 {Type: TokenIdentifierAssign, Data: "d"},
4251 {Type: TokenPunctuator, Data: "["},
4252 {Type: parser.TokenError, Data: "unexpected EOF"},
4253 },
4254 },
4255 { // 281
4256 "a[$b+1]=c",
4257 []parser.Token{
4258 {Type: TokenIdentifierAssign, Data: "a"},
4259 {Type: TokenPunctuator, Data: "["},
4260 {Type: TokenIdentifier, Data: "$b"},
4261 {Type: TokenPunctuator, Data: "+"},
4262 {Type: TokenNumberLiteral, Data: "1"},
4263 {Type: TokenPunctuator, Data: "]"},
4264 {Type: TokenAssignment, Data: "="},
4265 {Type: TokenWord, Data: "c"},
4266 {Type: parser.TokenDone, Data: ""},
4267 },
4268 },
4269 { // 282
4270 "a[b]=( 1 )",
4271 []parser.Token{
4272 {Type: TokenIdentifierAssign, Data: "a"},
4273 {Type: TokenPunctuator, Data: "["},
4274 {Type: TokenWord, Data: "b"},
4275 {Type: TokenPunctuator, Data: "]"},
4276 {Type: TokenAssignment, Data: "="},
4277 {Type: parser.TokenError, Data: "invalid character"},
4278 },
4279 },
4280 { // 283
4281 "a[ b ]",
4282 []parser.Token{
4283 {Type: TokenWord, Data: "a"},
4284 {Type: TokenPunctuator, Data: "["},
4285 {Type: TokenWord, Data: " "},
4286 {Type: TokenWord, Data: "b"},
4287 {Type: TokenWord, Data: " "},
4288 {Type: TokenPunctuator, Data: "]"},
4289 {Type: parser.TokenDone, Data: ""},
4290 },
4291 },
4292 { // 284
4293 "a[\nb\n]",
4294 []parser.Token{
4295 {Type: TokenWord, Data: "a"},
4296 {Type: TokenPunctuator, Data: "["},
4297 {Type: TokenWord, Data: "\n"},
4298 {Type: TokenWord, Data: "b"},
4299 {Type: TokenWord, Data: "\n"},
4300 {Type: TokenPunctuator, Data: "]"},
4301 {Type: parser.TokenDone, Data: ""},
4302 },
4303 },
4304 { // 285
4305 "a[ b ][ c ]",
4306 []parser.Token{
4307 {Type: TokenWord, Data: "a"},
4308 {Type: TokenPunctuator, Data: "["},
4309 {Type: TokenWord, Data: " "},
4310 {Type: TokenWord, Data: "b"},
4311 {Type: TokenWord, Data: " "},
4312 {Type: TokenPunctuator, Data: "]"},
4313 {Type: TokenWord, Data: "["},
4314 {Type: TokenWhitespace, Data: " "},
4315 {Type: TokenWord, Data: "c"},
4316 {Type: TokenWhitespace, Data: " "},
4317 {Type: TokenWord, Data: "]"},
4318 {Type: parser.TokenDone, Data: ""},
4319 },
4320 },
4321 { // 286
4322 "a b[",
4323 []parser.Token{
4324 {Type: TokenWord, Data: "a"},
4325 {Type: TokenWhitespace, Data: " "},
4326 {Type: TokenWord, Data: "b["},
4327 {Type: parser.TokenDone, Data: ""},
4328 },
4329 },
4330 { // 287
4331 "a[b]",
4332 []parser.Token{
4333 {Type: TokenWord, Data: "a"},
4334 {Type: TokenPunctuator, Data: "["},
4335 {Type: TokenWord, Data: "b"},
4336 {Type: TokenPunctuator, Data: "]"},
4337 {Type: parser.TokenDone, Data: ""},
4338 },
4339 },
4340 { // 288
4341 "a[b;]",
4342 []parser.Token{
4343 {Type: TokenWord, Data: "a"},
4344 {Type: TokenPunctuator, Data: "["},
4345 {Type: TokenWord, Data: "b;"},
4346 {Type: TokenPunctuator, Data: "]"},
4347 {Type: parser.TokenDone, Data: ""},
4348 },
4349 },
4350 { // 289
4351 "a[b;]=",
4352 []parser.Token{
4353 {Type: TokenIdentifierAssign, Data: "a"},
4354 {Type: TokenPunctuator, Data: "["},
4355 {Type: TokenWord, Data: "b"},
4356 {Type: parser.TokenError, Data: "invalid character"},
4357 },
4358 },
4359 { // 290
4360 "a[b{]",
4361 []parser.Token{
4362 {Type: TokenWord, Data: "a"},
4363 {Type: TokenPunctuator, Data: "["},
4364 {Type: TokenWord, Data: "b{"},
4365 {Type: TokenPunctuator, Data: "]"},
4366 {Type: parser.TokenDone, Data: ""},
4367 },
4368 },
4369 { // 291
4370 "a[b{]=",
4371 []parser.Token{
4372 {Type: TokenIdentifierAssign, Data: "a"},
4373 {Type: TokenPunctuator, Data: "["},
4374 {Type: TokenWord, Data: "b"},
4375 {Type: parser.TokenError, Data: "invalid character"},
4376 },
4377 },
4378 { // 292
4379 "a[b}]",
4380 []parser.Token{
4381 {Type: TokenWord, Data: "a"},
4382 {Type: TokenPunctuator, Data: "["},
4383 {Type: TokenWord, Data: "b"},
4384 {Type: TokenPunctuator, Data: "}"},
4385 {Type: TokenPunctuator, Data: "]"},
4386 {Type: parser.TokenDone, Data: ""},
4387 },
4388 },
4389 { // 293
4390 "a[b}]=",
4391 []parser.Token{
4392 {Type: TokenIdentifierAssign, Data: "a"},
4393 {Type: TokenPunctuator, Data: "["},
4394 {Type: TokenWord, Data: "b"},
4395 {Type: parser.TokenError, Data: "invalid character"},
4396 },
4397 },
4398 { // 294
4399 "a[b#]",
4400 []parser.Token{
4401 {Type: TokenWord, Data: "a"},
4402 {Type: TokenPunctuator, Data: "["},
4403 {Type: TokenWord, Data: "b#"},
4404 {Type: TokenPunctuator, Data: "]"},
4405 {Type: parser.TokenDone, Data: ""},
4406 },
4407 },
4408 { // 295
4409 "a[b #]",
4410 []parser.Token{
4411 {Type: TokenWord, Data: "a"},
4412 {Type: TokenPunctuator, Data: "["},
4413 {Type: TokenWord, Data: "b"},
4414 {Type: TokenWord, Data: " "},
4415 {Type: TokenWord, Data: "#"},
4416 {Type: TokenPunctuator, Data: "]"},
4417 {Type: parser.TokenDone, Data: ""},
4418 },
4419 },
4420 { // 296
4421 "a[b #]=",
4422 []parser.Token{
4423 {Type: TokenIdentifierAssign, Data: "a"},
4424 {Type: TokenPunctuator, Data: "["},
4425 {Type: TokenWord, Data: "b"},
4426 {Type: TokenWhitespace, Data: " "},
4427 {Type: parser.TokenError, Data: "invalid character"},
4428 },
4429 },
4430 { // 297
4431 "a[b #]+",
4432 []parser.Token{
4433 {Type: TokenWord, Data: "a"},
4434 {Type: TokenPunctuator, Data: "["},
4435 {Type: TokenWord, Data: "b"},
4436 {Type: TokenWord, Data: " "},
4437 {Type: TokenWord, Data: "#"},
4438 {Type: TokenPunctuator, Data: "]"},
4439 {Type: TokenWord, Data: "+"},
4440 {Type: parser.TokenDone, Data: ""},
4441 },
4442 },
4443 { // 298
4444 "a[b #]+=",
4445 []parser.Token{
4446 {Type: TokenIdentifierAssign, Data: "a"},
4447 {Type: TokenPunctuator, Data: "["},
4448 {Type: TokenWord, Data: "b"},
4449 {Type: TokenWhitespace, Data: " "},
4450 {Type: parser.TokenError, Data: "invalid character"},
4451 },
4452 },
4453 { // 299
4454 "a b[",
4455 []parser.Token{
4456 {Type: TokenWord, Data: "a"},
4457 {Type: TokenWhitespace, Data: " "},
4458 {Type: TokenWord, Data: "b["},
4459 {Type: parser.TokenDone, Data: ""},
4460 },
4461 },
4462 { // 300
4463 "a b[c]",
4464 []parser.Token{
4465 {Type: TokenWord, Data: "a"},
4466 {Type: TokenWhitespace, Data: " "},
4467 {Type: TokenWord, Data: "b[c]"},
4468 {Type: parser.TokenDone, Data: ""},
4469 },
4470 },
4471 { // 301
4472 "a b[c ]",
4473 []parser.Token{
4474 {Type: TokenWord, Data: "a"},
4475 {Type: TokenWhitespace, Data: " "},
4476 {Type: TokenWord, Data: "b[c"},
4477 {Type: TokenWhitespace, Data: " "},
4478 {Type: TokenWord, Data: "]"},
4479 {Type: parser.TokenDone, Data: ""},
4480 },
4481 },
4482 { // 302
4483 "a b[c]=1",
4484 []parser.Token{
4485 {Type: TokenWord, Data: "a"},
4486 {Type: TokenWhitespace, Data: " "},
4487 {Type: TokenIdentifierAssign, Data: "b"},
4488 {Type: TokenPunctuator, Data: "["},
4489 {Type: TokenWord, Data: "c"},
4490 {Type: TokenPunctuator, Data: "]"},
4491 {Type: TokenAssignment, Data: "="},
4492 {Type: TokenWord, Data: "1"},
4493 {Type: parser.TokenDone, Data: ""},
4494 },
4495 },
4496 { // 303
4497 "a=(( 1 ))",
4498 []parser.Token{
4499 {Type: TokenIdentifierAssign, Data: "a"},
4500 {Type: TokenAssignment, Data: "="},
4501 {Type: parser.TokenError, Data: "invalid character"},
4502 },
4503 },
4504 { // 304
4505 "a=",
4506 []parser.Token{
4507 {Type: TokenIdentifierAssign, Data: "a"},
4508 {Type: TokenAssignment, Data: "="},
4509 {Type: parser.TokenDone, Data: ""},
4510 },
4511 },
4512 { // 305
4513 "a=;",
4514 []parser.Token{
4515 {Type: TokenIdentifierAssign, Data: "a"},
4516 {Type: TokenAssignment, Data: "="},
4517 {Type: TokenPunctuator, Data: ";"},
4518 {Type: parser.TokenDone, Data: ""},
4519 },
4520 },
4521 { // 306
4522 "a= b",
4523 []parser.Token{
4524 {Type: TokenIdentifierAssign, Data: "a"},
4525 {Type: TokenAssignment, Data: "="},
4526 {Type: TokenWhitespace, Data: " "},
4527 {Type: TokenWord, Data: "b"},
4528 {Type: parser.TokenDone, Data: ""},
4529 },
4530 },
4531 { // 307
4532 "a[$(b)]=",
4533 []parser.Token{
4534 {Type: TokenIdentifierAssign, Data: "a"},
4535 {Type: TokenPunctuator, Data: "["},
4536 {Type: TokenPunctuator, Data: "$("},
4537 {Type: TokenWord, Data: "b"},
4538 {Type: TokenPunctuator, Data: ")"},
4539 {Type: TokenPunctuator, Data: "]"},
4540 {Type: TokenAssignment, Data: "="},
4541 {Type: parser.TokenDone, Data: ""},
4542 },
4543 },
4544 { // 308
4545 "{ function a() { b; } }",
4546 []parser.Token{
4547 {Type: TokenPunctuator, Data: "{"},
4548 {Type: TokenWhitespace, Data: " "},
4549 {Type: TokenKeyword, Data: "function"},
4550 {Type: TokenWhitespace, Data: " "},
4551 {Type: TokenFunctionIdentifier, Data: "a"},
4552 {Type: TokenPunctuator, Data: "("},
4553 {Type: TokenPunctuator, Data: ")"},
4554 {Type: TokenWhitespace, Data: " "},
4555 {Type: TokenPunctuator, Data: "{"},
4556 {Type: TokenWhitespace, Data: " "},
4557 {Type: TokenWord, Data: "b"},
4558 {Type: TokenPunctuator, Data: ";"},
4559 {Type: TokenWhitespace, Data: " "},
4560 {Type: TokenPunctuator, Data: "}"},
4561 {Type: TokenWhitespace, Data: " "},
4562 {Type: TokenPunctuator, Data: "}"},
4563 {Type: parser.TokenDone, Data: ""},
4564 },
4565 },
4566 { // 309
4567 "[ \"$a\" = \"b\" ]",
4568 []parser.Token{
4569 {Type: TokenWord, Data: "["},
4570 {Type: TokenWhitespace, Data: " "},
4571 {Type: TokenStringStart, Data: "\""},
4572 {Type: TokenIdentifier, Data: "$a"},
4573 {Type: TokenStringEnd, Data: "\""},
4574 {Type: TokenWhitespace, Data: " "},
4575 {Type: TokenWord, Data: "="},
4576 {Type: TokenWhitespace, Data: " "},
4577 {Type: TokenString, Data: "\"b\""},
4578 {Type: TokenWhitespace, Data: " "},
4579 {Type: TokenWord, Data: "]"},
4580 {Type: parser.TokenDone, Data: ""},
4581 },
4582 },
4583 { // 310
4584 "for a in $(seq $(( ( b - c ) / 2 ))); do d; done",
4585 []parser.Token{
4586 {Type: TokenKeyword, Data: "for"},
4587 {Type: TokenWhitespace, Data: " "},
4588 {Type: TokenIdentifier, Data: "a"},
4589 {Type: TokenWhitespace, Data: " "},
4590 {Type: TokenKeyword, Data: "in"},
4591 {Type: TokenWhitespace, Data: " "},
4592 {Type: TokenPunctuator, Data: "$("},
4593 {Type: TokenWord, Data: "seq"},
4594 {Type: TokenWhitespace, Data: " "},
4595 {Type: TokenPunctuator, Data: "$(("},
4596 {Type: TokenWhitespace, Data: " "},
4597 {Type: TokenPunctuator, Data: "("},
4598 {Type: TokenWhitespace, Data: " "},
4599 {Type: TokenWord, Data: "b"},
4600 {Type: TokenWhitespace, Data: " "},
4601 {Type: TokenPunctuator, Data: "-"},
4602 {Type: TokenWhitespace, Data: " "},
4603 {Type: TokenWord, Data: "c"},
4604 {Type: TokenWhitespace, Data: " "},
4605 {Type: TokenPunctuator, Data: ")"},
4606 {Type: TokenWhitespace, Data: " "},
4607 {Type: TokenPunctuator, Data: "/"},
4608 {Type: TokenWhitespace, Data: " "},
4609 {Type: TokenNumberLiteral, Data: "2"},
4610 {Type: TokenWhitespace, Data: " "},
4611 {Type: TokenPunctuator, Data: "))"},
4612 {Type: TokenPunctuator, Data: ")"},
4613 {Type: TokenPunctuator, Data: ";"},
4614 {Type: TokenWhitespace, Data: " "},
4615 {Type: TokenKeyword, Data: "do"},
4616 {Type: TokenWhitespace, Data: " "},
4617 {Type: TokenWord, Data: "d"},
4618 {Type: TokenPunctuator, Data: ";"},
4619 {Type: TokenWhitespace, Data: " "},
4620 {Type: TokenKeyword, Data: "done"},
4621 },
4622 },
4623 { // 311
4624 "a=b= c",
4625 []parser.Token{
4626 {Type: TokenIdentifierAssign, Data: "a"},
4627 {Type: TokenAssignment, Data: "="},
4628 {Type: TokenIdentifierAssign, Data: "b"},
4629 {Type: TokenAssignment, Data: "="},
4630 {Type: TokenWhitespace, Data: " "},
4631 {Type: TokenWord, Data: "c"},
4632 {Type: parser.TokenDone, Data: ""},
4633 },
4634 },
4635 { // 312
4636 "${a:${#b}}",
4637 []parser.Token{
4638 {Type: TokenPunctuator, Data: "${"},
4639 {Type: TokenIdentifier, Data: "a"},
4640 {Type: TokenPunctuator, Data: ":"},
4641 {Type: TokenPunctuator, Data: "${"},
4642 {Type: TokenPunctuator, Data: "#"},
4643 {Type: TokenIdentifier, Data: "b"},
4644 {Type: TokenPunctuator, Data: "}"},
4645 {Type: TokenPunctuator, Data: "}"},
4646 {Type: parser.TokenDone, Data: ""},
4647 },
4648 },
4649 { // 313
4650 "${a:${#b}:$c}",
4651 []parser.Token{
4652 {Type: TokenPunctuator, Data: "${"},
4653 {Type: TokenIdentifier, Data: "a"},
4654 {Type: TokenPunctuator, Data: ":"},
4655 {Type: TokenPunctuator, Data: "${"},
4656 {Type: TokenPunctuator, Data: "#"},
4657 {Type: TokenIdentifier, Data: "b"},
4658 {Type: TokenPunctuator, Data: "}"},
4659 {Type: TokenPunctuator, Data: ":"},
4660 {Type: TokenIdentifier, Data: "$c"},
4661 {Type: TokenPunctuator, Data: "}"},
4662 {Type: parser.TokenDone, Data: ""},
4663 },
4664 },
4665 } {
		p := parser.NewStringTokeniser(test.Input)

		SetTokeniser(&p)

		// Pull one token per expected entry and compare type first, then
		// data. Stop at the first mismatch so a single divergence does not
		// cascade into a misleading report for every subsequent token.
		for m, tkn := range test.Output {
			if tk, _ := p.GetToken(); tk.Type != tkn.Type {
				if tk.Type == parser.TokenError {
					// The tokeniser errored where a normal token was
					// expected: surface its error message directly.
					t.Errorf("test %d.%d: unexpected error: %s", n+1, m+1, tk.Data)
				} else {
					t.Errorf("test %d.%d: Incorrect type, expecting %d, got %d", n+1, m+1, tkn.Type, tk.Type)
				}

				break
			} else if tk.Data != tkn.Data {
				t.Errorf("test %d.%d: Incorrect data, expecting %q, got %q", n+1, m+1, tkn.Data, tk.Data)

				break
			}
		}
4685 }
4686 }
4687