// bash - tokeniser_test.go

package bash

import (
	"testing"

	"vimagination.zapto.org/parser"
)

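// TestTokeniser feeds each Input to the bash tokeniser and checks the
// emitted tokens against the expected Output sequence.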
func TestTokeniser(t *testing.T) {
	for n, test := range [...]struct {
		Input  string
		Output []parser.Token
	}{
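		// Whitespace and line terminators; escaped newlines count as whitespace.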
		{ // 1
			"",
			[]parser.Token{
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 2
			" ",
			[]parser.Token{
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 3
			" \t\\\n",
			[]parser.Token{
				{Type: TokenWhitespace, Data: " \t\\\n"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 4
			"\\\n \t",
			[]parser.Token{
				{Type: TokenWhitespace, Data: "\\\n \t"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 5
			" \n\n \n",
			[]parser.Token{
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenLineTerminator, Data: "\n\n"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
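		// Comments run from '#' to the end of the line.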
		{ // 6
			"#A comment\n# B comment",
			[]parser.Token{
				{Type: TokenComment, Data: "#A comment"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenComment, Data: "# B comment"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
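		// Variable and special-parameter references; positional parameters are single digits.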
		{ // 7
			"$ident $name a\\nbc=a $0 $12 a$b a${b}c $$ $! $?",
			[]parser.Token{
				{Type: TokenIdentifier, Data: "$ident"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "$name"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a\\nbc=a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "$0"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "$1"},
				{Type: TokenWord, Data: "2"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenIdentifier, Data: "$b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "b"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWord, Data: "c"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "$$"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "$!"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "$?"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
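		// Assignments, including array subscripts and the += operator.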
		{ // 8
			"abc=a def[0]=b ghi[$i]=c jkl+=d",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "abc"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifierAssign, Data: "def"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenNumberLiteral, Data: "0"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifierAssign, Data: "ghi"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenIdentifier, Data: "$i"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWord, Data: "c"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifierAssign, Data: "jkl"},
				{Type: TokenPunctuator, Data: "+="},
				{Type: TokenWord, Data: "d"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
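		// ${...} parameter expansion and a bare trailing '$'.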
		{ // 9
			"ident ${name} ab\\nc=a ${6} a$ ",
			[]parser.Token{
				{Type: TokenWord, Data: "ident"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "name"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "ab\\nc=a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenNumberLiteral, Data: "6"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a$"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenDone, Data: ""},
			},
		},
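		// Number literals in arithmetic expansion: decimal, hex, octal, and base#n.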
		{ // 10
			"$(( 0 1 29 0xff 0xDeAdBeEf 0755 2#5 ))",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "$(("},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumberLiteral, Data: "0"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumberLiteral, Data: "1"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumberLiteral, Data: "29"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumberLiteral, Data: "0xff"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumberLiteral, Data: "0xDeAdBeEf"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumberLiteral, Data: "0755"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumberLiteral, Data: "2#5"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "))"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
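		// Strings: quoting styles, embedded substitutions, and $"..." / $'...' forms.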
		{ // 11
			"\"abc\" \"de\\nf\" \"stuff`command`more stuff\" \"text $ident $another end\" \"text $(command) end - text ${ident} end\" \"with\nnewline\" 'with\nnewline' $\"a string\" $'a \\'string'",
			[]parser.Token{
				{Type: TokenString, Data: "\"abc\""},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenString, Data: "\"de\\nf\""},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenStringStart, Data: "\"stuff"},
				{Type: TokenOpenBacktick, Data: "`"},
				{Type: TokenWord, Data: "command"},
				{Type: TokenCloseBacktick, Data: "`"},
				{Type: TokenStringEnd, Data: "more stuff\""},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenStringStart, Data: "\"text "},
				{Type: TokenIdentifier, Data: "$ident"},
				{Type: TokenStringMid, Data: " "},
				{Type: TokenIdentifier, Data: "$another"},
				{Type: TokenStringEnd, Data: " end\""},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenStringStart, Data: "\"text "},
				{Type: TokenPunctuator, Data: "$("},
				{Type: TokenWord, Data: "command"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenStringMid, Data: " end - text "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "ident"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenStringEnd, Data: " end\""},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenString, Data: "\"with\nnewline\""},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenString, Data: "'with\nnewline'"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenString, Data: "$\"a string\""},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenString, Data: "$'a \\'string'"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
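		// Redirection and control operators.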
		{ // 12
			"< <<< <& <> > >> >& &>> >| | |& || & && {} + = += `` $() $(()) (())",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "<<<"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "<&"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "<>"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: ">"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: ">>"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: ">&"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "&>>"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: ">|"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "|"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "|&"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "||"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "&"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "&&"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "+"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "+="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenOpenBacktick, Data: "`"},
				{Type: TokenCloseBacktick, Data: "`"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "$("},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "$(("},
				{Type: TokenPunctuator, Data: "))"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
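		// Operators and expressions inside arithmetic, including invalid characters.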
		{ // 13
			"$(( + += - -= & &= | |= < <= > >= = == ! != * *= ** / /= % %= ^ ^= ~ ? : , (1) ))",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "$(("},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "+"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "+="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "-"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "-="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "&"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "&="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "|"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "|="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "<"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "<="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: ">"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: ">="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "=="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "!"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "!="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "*"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "*="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "**"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "/"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "/="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "%"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "%="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "^"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "^="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "~"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "?"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: ":"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: ","},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenNumberLiteral, Data: "1"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "))"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 14
			"$(( a+b 1+2))",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "$(("},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "+"},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumberLiteral, Data: "1"},
				{Type: TokenPunctuator, Data: "+"},
				{Type: TokenNumberLiteral, Data: "2"},
				{Type: TokenPunctuator, Data: "))"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 15
			"(( a+b 1+2))",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "(("},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "+"},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumberLiteral, Data: "1"},
				{Type: TokenPunctuator, Data: "+"},
				{Type: TokenNumberLiteral, Data: "2"},
				{Type: TokenPunctuator, Data: "))"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 16
			"$(( ( ))",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "$(("},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: ")"},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 17
			"$(( ? ))",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "$(("},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "?"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 18
			"$(( ] ))",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "$(("},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
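		// Error cases: invalid characters and unexpected EOF.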
		{ // 19
			"{ )",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 20
			"(",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "("},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 21
			"a b(",
			[]parser.Token{
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 22
			"$(",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "$("},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 23
			"$(}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "$("},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
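		// Heredocs: plain, indented (<<-), quoted/escaped delimiters, nesting, and expansions.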
		{ // 24
			"<<abc\n123\n456\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "123\n456\n"},
				{Type: TokenHeredocEnd, Data: "abc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 25
			"<< abc\n123\n456\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "123\n456\n"},
				{Type: TokenHeredocEnd, Data: "abc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 26
			"<<-abc\n123\n456\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<-"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "123\n456\n"},
				{Type: TokenHeredocEnd, Data: "abc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 27
			"<<-abc\n\t123\n\t\t456\n\t\t\tabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<-"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredocIndent, Data: "\t"},
				{Type: TokenHeredoc, Data: "123\n"},
				{Type: TokenHeredocIndent, Data: "\t\t"},
				{Type: TokenHeredoc, Data: "456\n"},
				{Type: TokenHeredocIndent, Data: "\t\t\t"},
				{Type: TokenHeredocEnd, Data: "abc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 28
			"<<a'b 'c\n123\n456\nab c\n",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "a'b 'c"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "123\n456\n"},
				{Type: TokenHeredocEnd, Data: "ab c"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 29
			"<<def\n123\n456\ndef\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "def"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "123\n456\n"},
				{Type: TokenHeredocEnd, Data: "def"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWord, Data: "abc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 30
			"<<def cat\n123\n456\ndef\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "def"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "cat"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "123\n456\n"},
				{Type: TokenHeredocEnd, Data: "def"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWord, Data: "abc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 31
			"<<abc cat;<<def cat\n123\nabc\n456\ndef",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "cat"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "def"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "cat"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "123\n"},
				{Type: TokenHeredocEnd, Data: "abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "456\n"},
				{Type: TokenHeredocEnd, Data: "def"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 32
			"<<abc cat;echo $(<<def cat\n456\ndef\n)\n123\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "cat"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWord, Data: "echo"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "$("},
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "def"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "cat"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "456\n"},
				{Type: TokenHeredocEnd, Data: "def"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "123\n"},
				{Type: TokenHeredocEnd, Data: "abc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 33
			"<<abc\na$abc\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "a"},
				{Type: TokenIdentifier, Data: "$abc"},
				{Type: TokenHeredoc, Data: "\n"},
				{Type: TokenHeredocEnd, Data: "abc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 34
			"<<'abc'\na$abc\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "'abc'"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "a$abc\n"},
				{Type: TokenHeredocEnd, Data: "abc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 35
			"<<\"\"abc\na$abc\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "\"\"abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "a$abc\n"},
				{Type: TokenHeredocEnd, Data: "abc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 36
			"<<a\\ b\\ c\na$abc\na b c",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "a\\ b\\ c"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "a$abc\n"},
				{Type: TokenHeredocEnd, Data: "a b c"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 37
			"<<abc\na${abc} $99\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "a"},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "abc"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenHeredoc, Data: " "},
				{Type: TokenIdentifier, Data: "$9"},
				{Type: TokenHeredoc, Data: "9\n"},
				{Type: TokenHeredocEnd, Data: "abc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 38
			"<<abc\na$(\necho abc;\n) 1\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "a"},
				{Type: TokenPunctuator, Data: "$("},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWord, Data: "echo"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenHeredoc, Data: " 1\n"},
				{Type: TokenHeredocEnd, Data: "abc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 39
			"<<abc\n$a\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenIdentifier, Data: "$a"},
				{Type: TokenHeredoc, Data: "\n"},
				{Type: TokenHeredocEnd, Data: "abc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 40
			"<<abc\n$$\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenIdentifier, Data: "$$"},
				{Type: TokenHeredoc, Data: "\n"},
				{Type: TokenHeredocEnd, Data: "abc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 41
			"<<abc\n$!\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenIdentifier, Data: "$!"},
				{Type: TokenHeredoc, Data: "\n"},
				{Type: TokenHeredocEnd, Data: "abc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 42
			"<<abc\n$?\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenIdentifier, Data: "$?"},
				{Type: TokenHeredoc, Data: "\n"},
				{Type: TokenHeredocEnd, Data: "abc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 43
			"<<abc\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredocEnd, Data: "abc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 44
			"<<abc\na$(<<def) 1\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "a"},
				{Type: TokenPunctuator, Data: "$("},
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "def"},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 45
			"<<abc\na$(<<def cat) 1\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "a"},
				{Type: TokenPunctuator, Data: "$("},
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "def"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "cat"},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 46
			"<<abc;$(<<def cat)\nabc\ndef\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenPunctuator, Data: "$("},
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "def"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "cat"},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 47
			"<<abc;<<def;$(<<ghi;<<jkl\nghi\njkl\n)\nabc\ndef",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "def"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenPunctuator, Data: "$("},
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "ghi"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "jkl"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredocEnd, Data: "ghi"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredocEnd, Data: "jkl"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredocEnd, Data: "abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredocEnd, Data: "def"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 48
			"<<a\\\nbc\nabc\ndef\na\nbc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "a\\\nbc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "abc\ndef\n"},
				{Type: TokenHeredocEnd, Data: "a\nbc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 49
			"<<a;echo ${a/b/\n$c #not-a-comment $d}\n123\na",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWord, Data: "echo"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenIdentifier, Data: "$c"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "#not-a-comment"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "$d"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "123\n"},
				{Type: TokenHeredocEnd, Data: "a"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
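		// A leading file-descriptor number before a redirection.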
		{ // 50
			"2>1 word",
			[]parser.Token{
				{Type: TokenNumberLiteral, Data: "2"},
				{Type: TokenPunctuator, Data: ">"},
				{Type: TokenWord, Data: "1"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "word"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
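		// The time keyword.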
		{ // 51
			"time -p cmd",
			[]parser.Token{
				{Type: TokenKeyword, Data: "time"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "-p"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "cmd"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 52
			"time -p cmd if",
			[]parser.Token{
				{Type: TokenKeyword, Data: "time"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "-p"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "cmd"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "if"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 53
			"time -p if a;then b;fi",
			[]parser.Token{
				{Type: TokenKeyword, Data: "time"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "-p"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "if"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenKeyword, Data: "then"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenKeyword, Data: "fi"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
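		// Brace expansion: sequences and comma lists, valid and invalid.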
		{ // 54
			"{a..b..2} {a,b,d} a{b,c,d}e a{1..4} {2..10..-1} {-1..-100..5} {a..z..-1}",
			[]parser.Token{
				{Type: TokenBraceExpansion, Data: "{a..b..2}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBraceExpansion, Data: "{a,b,d}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenBraceExpansion, Data: "{b,c,d}"},
				{Type: TokenWord, Data: "e"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenBraceExpansion, Data: "{1..4}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBraceExpansion, Data: "{2..10..-1}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBraceExpansion, Data: "{-1..-100..5}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBraceExpansion, Data: "{a..z..-1}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 55
			"{a..2}",
			[]parser.Token{
				{Type: TokenWord, Data: "{a..2}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 56
			"{a..b..c}",
			[]parser.Token{
				{Type: TokenWord, Data: "{a..b..c}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 57
			"{a..b2}",
			[]parser.Token{
				{Type: TokenWord, Data: "{a..b2}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 58
			"{_a,_b,_c}",
			[]parser.Token{
				{Type: TokenBraceExpansion, Data: "{_a,_b,_c}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 59
			"{1,2,3}",
			[]parser.Token{
				{Type: TokenBraceExpansion, Data: "{1,2,3}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 60
			"{1..}",
			[]parser.Token{
				{Type: TokenWord, Data: "{1..}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 61
			"{1..3..a}",
			[]parser.Token{
				{Type: TokenWord, Data: "{1..3..a}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 62
			"{1..3..1a}",
			[]parser.Token{
				{Type: TokenWord, Data: "{1..3..1a}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 63
			"{-",
			[]parser.Token{
				{Type: TokenWord, Data: "{-"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 64
			"{-,_}",
			[]parser.Token{
				{Type: TokenBraceExpansion, Data: "{-,_}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 65
			"{-\\n}",
			[]parser.Token{
				{Type: TokenWord, Data: "{-\\n}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 66
			"{-,\\n}",
			[]parser.Token{
				{Type: TokenBraceExpansion, Data: "{-,\\n}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 67
			"a={123",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWord, Data: "{123"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 68
			"word{ word{a} word{\nword{",
			[]parser.Token{
				{Type: TokenWord, Data: "word{"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "word{a}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "word{"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWord, Data: "word{"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
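		// Command grouping and subshells.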
		{ // 69
			"{ echo 123; echo 456; }",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "echo"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "123"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "echo"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "456"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 70
			"(echo 123; echo 456)",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenWord, Data: "echo"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "123"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "echo"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "456"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
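		// Backtick command substitution, escaped nesting, and backslash escapes.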
		{ // 71
			"`a` `echo \\`abc\\`` echo \"a`echo \"1\\`echo u\\\\\\`echo 123\\\\\\`v\\`3\"`c\"",
			[]parser.Token{
				{Type: TokenOpenBacktick, Data: "`"},
				{Type: TokenWord, Data: "a"},
				{Type: TokenCloseBacktick, Data: "`"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenOpenBacktick, Data: "`"},
				{Type: TokenWord, Data: "echo"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenOpenBacktick, Data: "\\`"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenCloseBacktick, Data: "\\`"},
				{Type: TokenCloseBacktick, Data: "`"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "echo"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenStringStart, Data: "\"a"},
				{Type: TokenOpenBacktick, Data: "`"},
				{Type: TokenWord, Data: "echo"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenStringStart, Data: "\"1"},
				{Type: TokenOpenBacktick, Data: "\\`"},
				{Type: TokenWord, Data: "echo"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "u"},
				{Type: TokenOpenBacktick, Data: "\\\\\\`"},
				{Type: TokenWord, Data: "echo"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "123"},
				{Type: TokenCloseBacktick, Data: "\\\\\\`"},
				{Type: TokenWord, Data: "v"},
				{Type: TokenCloseBacktick, Data: "\\`"},
				{Type: TokenStringEnd, Data: "3\""},
				{Type: TokenCloseBacktick, Data: "`"},
				{Type: TokenStringEnd, Data: "c\""},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 72
			"`\\``",
			[]parser.Token{
				{Type: TokenOpenBacktick, Data: "`"},
				{Type: TokenOpenBacktick, Data: "\\`"},
				{Type: parser.TokenError, Data: "incorrect backtick depth"},
			},
		},
		{ // 73
			"`\\`\\\\\\``",
			[]parser.Token{
				{Type: TokenOpenBacktick, Data: "`"},
				{Type: TokenOpenBacktick, Data: "\\`"},
				{Type: TokenOpenBacktick, Data: "\\\\\\`"},
				{Type: parser.TokenError, Data: "incorrect backtick depth"},
			},
		},
		{ // 74
			"`\\`\\\\\\`\\`",
			[]parser.Token{
				{Type: TokenOpenBacktick, Data: "`"},
				{Type: TokenOpenBacktick, Data: "\\`"},
				{Type: TokenOpenBacktick, Data: "\\\\\\`"},
				{Type: parser.TokenError, Data: "incorrect backtick depth"},
			},
		},
		{ // 75
			"`\\$abc`",
			[]parser.Token{
				{Type: TokenOpenBacktick, Data: "`"},
				{Type: TokenIdentifier, Data: "\\$abc"},
				{Type: TokenCloseBacktick, Data: "`"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 76
			"echo `echo \\\"abc\\\"`",
			[]parser.Token{
				{Type: TokenWord, Data: "echo"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenOpenBacktick, Data: "`"},
				{Type: TokenWord, Data: "echo"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "\\\"abc\\\""},
				{Type: TokenCloseBacktick, Data: "`"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 77
			"\\\"abc\\\"",
			[]parser.Token{
				{Type: TokenWord, Data: "\\\"abc\\\""},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 78
			"\\",
			[]parser.Token{
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
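		// Redirection variants and process substitution.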
		{ // 79
			"{abc}>2",
			[]parser.Token{
				{Type: TokenBraceWord, Data: "{abc}"},
				{Type: TokenPunctuator, Data: ">"},
				{Type: TokenWord, Data: "2"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 80
			"<&1-",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<&"},
				{Type: TokenWord, Data: "1-"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 81
			"<(a)",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<("},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 82
			"a >(b)",
			[]parser.Token{
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: ">("},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
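		// Parameter expansion operators: indirection, substrings, defaults, patterns, and transformations.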
		{ // 83
			": ${!a} ${!a*} ${!a@} ${!a[@]} ${!a[*]} ${a:1:2} ${a: -1 : -2} ${a:1} ${a:-b} ${a:=b} ${a:?a is unset} ${a:+a is set} ${#a} ${#} ${a#b} ${a##b} ${a%b} ${a%%b} ${a/b/c} ${a//b/c} ${a/#b/c} ${a/%b/c} ${a^b} ${a^^b} ${a,b} ${a,,b} ${a@Q} ${a@a} ${a@P}",
			[]parser.Token{
				{Type: TokenWord, Data: ":"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenPunctuator, Data: "!"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenPunctuator, Data: "!"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "*"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenPunctuator, Data: "!"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "@"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenPunctuator, Data: "!"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "@"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenPunctuator, Data: "!"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "*"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: ":"},
				{Type: TokenNumberLiteral, Data: "1"},
				{Type: TokenPunctuator, Data: ":"},
				{Type: TokenNumberLiteral, Data: "2"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: ":"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumberLiteral, Data: "-1"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: ":"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumberLiteral, Data: "-2"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: ":"},
				{Type: TokenNumberLiteral, Data: "1"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: ":-"},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: ":="},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: ":?"},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "is"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "unset"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: ":+"},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "is"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "set"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenPunctuator, Data: "#"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenKeyword, Data: "#"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "#"},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "##"},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "%"},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "%%"},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "//"},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "/#"},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "/%"},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "^"},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "^^"},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: ","},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: ",,"},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "@"},
				{Type: TokenBraceWord, Data: "Q"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "@"},
				{Type: TokenBraceWord, Data: "a"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "@"},
				{Type: TokenBraceWord, Data: "P"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 84
			"${a[@@]}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "@"},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 85
			"${a/[/c}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 86
			"${a/\\[/c}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: TokenPattern, Data: "\\["},
				{Type: TokenPunctuator, Data: "/"},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 87
			"${a/[b]/c}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: TokenPattern, Data: "[b]"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 88
			"${a/(/c}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 89
			"${a/\\(/c}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: TokenPattern, Data: "\\("},
				{Type: TokenPunctuator, Data: "/"},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 90
			"${a/(b)/c}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: TokenPattern, Data: "(b)"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 91
			"${a@Z}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "@"},
				{Type: parser.TokenError, Data: "invalid parameter expansion"},
			},
		},
		{ // 92
			"${@} ${*}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenKeyword, Data: "@"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenKeyword, Data: "*"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 93
			"$() $(()) `` ${}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "$("},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "$(("},
				{Type: TokenPunctuator, Data: "))"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenOpenBacktick, Data: "`"},
				{Type: TokenCloseBacktick, Data: "`"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "${"},
				{Type: parser.TokenError, Data: "invalid parameter expansion"},
			},
		},
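		// case statements, including ;;, ;&, and ;;& terminators.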
		{ // 94
			"case a in b)c;;esac",
			[]parser.Token{
				{Type: TokenKeyword, Data: "case"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "in"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: ";;"},
				{Type: TokenKeyword, Data: "esac"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 95
			"case a in b;;esac",
			[]parser.Token{
				{Type: TokenKeyword, Data: "case"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "in"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 96
			"case a in esac",
			[]parser.Token{
				{Type: TokenKeyword, Data: "case"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "in"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "esac"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 97
			"case a in #comment\nesac",
			[]parser.Token{
				{Type: TokenKeyword, Data: "case"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "in"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenComment, Data: "#comment"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenKeyword, Data: "esac"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 98
			"case a in b)c;;d)e;&f)g;;&h)i\nesac",
			[]parser.Token{
				{Type: TokenKeyword, Data: "case"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "in"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: ";;"},
				{Type: TokenWord, Data: "d"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWord, Data: "e"},
				{Type: TokenPunctuator, Data: ";&"},
				{Type: TokenWord, Data: "f"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWord, Data: "g"},
				{Type: TokenPunctuator, Data: ";;&"},
				{Type: TokenWord, Data: "h"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWord, Data: "i"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenKeyword, Data: "esac"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 99
			"case a in b) #comment\nc;; #comment\nd)e;&f)g\n#comment\nesac",
			[]parser.Token{
				{Type: TokenKeyword, Data: "case"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "in"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenComment, Data: "#comment"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: ";;"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenComment, Data: "#comment"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWord, Data: "d"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWord, Data: "e"},
				{Type: TokenPunctuator, Data: ";&"},
				{Type: TokenWord, Data: "f"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWord, Data: "g"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenComment, Data: "#comment"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenKeyword, Data: "esac"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 100
			"case a b)c;;esac",
			[]parser.Token{
				{Type: TokenKeyword, Data: "case"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "missing in"},
			},
		},
		{ // 101
			"case a in b)c;;",
			[]parser.Token{
				{Type: TokenKeyword, Data: "case"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "in"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: ";;"},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
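		// if/elif/else statements.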
		{ // 102
			"if a; then b; fi",
			[]parser.Token{
				{Type: TokenKeyword, Data: "if"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "then"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "fi"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 103
			"if a;\nthen\nb\nfi",
			[]parser.Token{
				{Type: TokenKeyword, Data: "if"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenKeyword, Data: "then"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWord, Data: "b"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenKeyword, Data: "fi"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 104
			"if a && b || c & then d; fi",
			[]parser.Token{
				{Type: TokenKeyword, Data: "if"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "&&"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "||"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "c"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "&"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "then"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "d"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "fi"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 105
			"if a; then b; else c; fi",
			[]parser.Token{
				{Type: TokenKeyword, Data: "if"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "then"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "else"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "fi"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 106
			"if a #comment\n then b; else #comment\nc; fi",
			[]parser.Token{
				{Type: TokenKeyword, Data: "if"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenComment, Data: "#comment"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "then"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "else"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenComment, Data: "#comment"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "fi"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 107
			"if a\n#comment\n then b; else\n#comment\nc; fi",
			[]parser.Token{
				{Type: TokenKeyword, Data: "if"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenComment, Data: "#comment"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "then"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "else"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenComment, Data: "#comment"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "fi"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 108
			"if a; then b; elif c; then d; else if e; then f; fi; fi",
			[]parser.Token{
				{Type: TokenKeyword, Data: "if"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "then"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "elif"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "then"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "d"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "else"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "if"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "e"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "then"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "f"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "fi"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "fi"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
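		// While and until loops, including comments and control operators before "do".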
		{ // 109
			"while a; do b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "while"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 110
			"while a; #comment\ndo b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "while"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenComment, Data: "#comment"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 111
			"until a && b || c & do b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "until"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "&&"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "||"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "c"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "&"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 112
			"while a; do break; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "while"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "break"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 113
			"until a; do continue; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "until"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "continue"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
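		// break and continue are keywords inside a loop body; outside one they are invalid.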
		{ // 114
			"break",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid keyword"},
			},
		},
		{ // 115
			"continue",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid keyword"},
			},
		},
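		// for loops: plain identifiers, "in" word lists, and C-style (( ... )) clauses, plus error cases.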
		{ // 116
			"for a; do b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "for"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 117
			"for a #comment\ndo b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "for"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenComment, Data: "#comment"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 118
			"for a do b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "for"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 119
			"for a\ndo b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "for"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 120
			"for a in 1 2 3; do b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "for"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "in"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "1"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "2"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "3"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 121
			"for a in 1 2 3 #comment\ndo b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "for"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "in"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "1"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "2"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "3"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenComment, Data: "#comment"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 122
			"for a #comment\nin 1 2 3\ndo b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "for"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenComment, Data: "#comment"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenKeyword, Data: "in"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "1"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "2"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "3"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 123
			"for % in 1 2 3; do b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "for"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid identifier"},
			},
		},
		{ // 124
			"for a in 1 2 3 do b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "for"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "in"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "1"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "2"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "3"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "missing do"},
			},
		},
		{ // 125
			"for ((a=1;a<2;a++)) do b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "for"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "(("},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenNumberLiteral, Data: "1"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "<"},
				{Type: TokenNumberLiteral, Data: "2"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "++"},
				{Type: TokenPunctuator, Data: "))"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 126
			"for ((a=1;a<2;a++)); do b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "for"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "(("},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenNumberLiteral, Data: "1"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "<"},
				{Type: TokenNumberLiteral, Data: "2"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "++"},
				{Type: TokenPunctuator, Data: "))"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 127
			"for ( a=1;a<2;a++ ); do b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "for"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
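		// select statements, which take the same forms as for loops.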
		{ // 128
			"select a; do b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "select"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 129
			"select a do b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "select"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 130
			"select a\ndo b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "select"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 131
			"select a in 1 2 3; do b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "select"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "in"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "1"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "2"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "3"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 132
			"select a in 1 2 3 do b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "select"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "in"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "1"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "2"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "3"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "missing do"},
			},
		},
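		// coproc, with an optional name and simple or compound commands.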
		{ // 133
			"coproc a b",
			[]parser.Token{
				{Type: TokenKeyword, Data: "coproc"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 134
			"coproc fora b",
			[]parser.Token{
				{Type: TokenKeyword, Data: "coproc"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "fora"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 135
			"coproc while a; do b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "coproc"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "while"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 136
			"coproc a while b; do c; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "coproc"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "while"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "do"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "done"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
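		// Lone '}' tokens and brace group commands.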
		{ // 137
			"echo }",
			[]parser.Token{
				{Type: TokenWord, Data: "echo"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 138
			"{ echo }",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "echo"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 139
			"{ echo };}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "echo"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
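		// Function definitions, with and without the "function" keyword and parentheses.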
		{ // 140
			"function a{ b; }",
			[]parser.Token{
				{Type: TokenKeyword, Data: "function"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenFunctionIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 141
			"function a{ b; }\nfunction a\n{ b; }",
			[]parser.Token{
				{Type: TokenKeyword, Data: "function"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenFunctionIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenKeyword, Data: "function"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenFunctionIdentifier, Data: "a"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 142
			"function a\n{ b; }",
			[]parser.Token{
				{Type: TokenKeyword, Data: "function"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenFunctionIdentifier, Data: "a"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 143
			"function a(){ b; }",
			[]parser.Token{
				{Type: TokenKeyword, Data: "function"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenFunctionIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 144
			"function a ( ) { b; }",
			[]parser.Token{
				{Type: TokenKeyword, Data: "function"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenFunctionIdentifier, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 145
			"function a() b",
			[]parser.Token{
				{Type: TokenKeyword, Data: "function"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenFunctionIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid keyword"},
			},
		},
		{ // 146
			"a(){ b; }",
			[]parser.Token{
				{Type: TokenFunctionIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 147
			"a( ) { b; }",
			[]parser.Token{
				{Type: TokenFunctionIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 148
			"a() b",
			[]parser.Token{
				{Type: TokenFunctionIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid keyword"},
			},
		},
		{ // 149
			"a() b",
			[]parser.Token{
				{Type: TokenFunctionIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid keyword"},
			},
		},
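		// [[ ... ]] conditional expressions: unary tests, binary operators, patterns, parentheses, comments, and error cases.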
		{ // 150
			"[[ -f file ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-f"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "file"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 151
			"[[ ! -e file\"str\" ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "!"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-e"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "file"},
				{Type: TokenString, Data: "\"str\""},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 152
			"[[ -S \"str\"a || -g $b\"c\"d ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-S"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenString, Data: "\"str\""},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "||"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-g"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "$b"},
				{Type: TokenString, Data: "\"c\""},
				{Type: TokenWord, Data: "d"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 153
			"[[ a = b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBinaryOperator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 154
			"[[ a$b = c\"d\" && e\"f\"g != \"h\"$i ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenIdentifier, Data: "$b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBinaryOperator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPattern, Data: "c"},
				{Type: TokenString, Data: "\"d\""},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "&&"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "e"},
				{Type: TokenString, Data: "\"f\""},
				{Type: TokenWord, Data: "g"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBinaryOperator, Data: "!="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenString, Data: "\"h\""},
				{Type: TokenIdentifier, Data: "$i"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 155
			"[[ a -gt b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-gt"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 156
			"[[ # A\n# B\n\n# C\na -gt b # D\n]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenComment, Data: "# A"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenComment, Data: "# B"},
				{Type: TokenLineTerminator, Data: "\n\n"},
				{Type: TokenComment, Data: "# C"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-gt"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenComment, Data: "# D"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 157
			"[[ a$b -eq c\"d\" && e\"f\"g -ne \"h\"$i ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenIdentifier, Data: "$b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-eq"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "c"},
				{Type: TokenString, Data: "\"d\""},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "&&"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "e"},
				{Type: TokenString, Data: "\"f\""},
				{Type: TokenWord, Data: "g"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-ne"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenString, Data: "\"h\""},
				{Type: TokenIdentifier, Data: "$i"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 158
			"[[ (a = b || c = d) && $e -le $f ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBinaryOperator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "||"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "c"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBinaryOperator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPattern, Data: "d"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "&&"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "$e"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-le"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "$f"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 159
			"[[ (a=b) ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenWord, Data: "a=b"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 160
			"[[ a < b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBinaryOperator, Data: "<"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 161
			"[[ a<b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenBinaryOperator, Data: "<"},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 162
			"[[ (a = b) ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBinaryOperator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 163
			"[[ (a -gt b) ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-gt"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 164
			"[[\na\n=\nb\n]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWord, Data: "a"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenBinaryOperator, Data: "="},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 165
			"[[\n(a=b)\n]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenWord, Data: "a=b"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 166
			"[[ ",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 167
			"[[ | = b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 168
			"[[ & = b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 169
			"[[ \"a\" = b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenString, Data: "\"a\""},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBinaryOperator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 170
			"[[ ]]a = ]]b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "]]a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBinaryOperator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPattern, Data: "]]b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 171
			"[[ ( a = ]]b ) ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBinaryOperator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPattern, Data: "]]b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 172
			"[[ ) = ) ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 173
			"[[ ( ]] ) ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 174
			"[[ a \n= b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenBinaryOperator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 175
			"[[ a\n = b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBinaryOperator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 176
			"[[ a ",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 177
			"[[ a ! b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 178
			"[[ a -ez b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 179
			"[[ a -nz b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 180
			"[[ a -gz b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 181
			"[[ a -lz b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 182
			"[[ a -oz b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 183
			"[[ a -z b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 184
			"[[ -z < ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-z"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 185
			"[[ -z \n a ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-z"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 186
			"[[ a = b\\nc ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBinaryOperator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPattern, Data: "b\\nc"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 187
			"[[ a = b",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBinaryOperator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 188
			"[[ -z `a` ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-z"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenOpenBacktick, Data: "`"},
				{Type: TokenWord, Data: "a"},
				{Type: TokenCloseBacktick, Data: "`"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 189
			"[[ -z | ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-z"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 190
			"[[ -z a",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-z"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 191
			"[[ -z ",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-z"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 192
			"[[ -z #comment\na ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-z"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 193
			"[[ a -eq #comment\nb ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-eq"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 194
			"[[ #comment\na = b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenComment, Data: "#comment"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBinaryOperator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 195
			"[[ ( a = b ) #comment\n ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBinaryOperator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPattern, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenComment, Data: "#comment"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 196
			"[[ a = #comment\nb ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenBinaryOperator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 197
			"[[ a -net b ]]",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid operator"},
			},
		},
		{ // 198
			"[[ -f file ]]\n[[ ! -e file\"str\" ]];[[ -S \"str\"a",
			[]parser.Token{
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-f"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "file"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "!"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-e"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "file"},
				{Type: TokenString, Data: "\"str\""},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "]]"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenKeyword, Data: "[["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "-S"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenString, Data: "\"str\""},
				{Type: TokenWord, Data: "a"},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
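		// Unterminated strings, arithmetic expansions, and backticks.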
		{ // 199
			"\"",
			[]parser.Token{
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 200
			"$((",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "$(("},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 201
			"$(( \"1\" ))",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "$(("},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenString, Data: "\"1\""},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "))"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 202
			"$(( : ))",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "$(("},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 203
			"$(( ; ))",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "$(("},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 204
			"`\\",
			[]parser.Token{
				{Type: TokenOpenBacktick, Data: "`"},
				{Type: parser.TokenError, Data: "incorrect backtick depth"},
			},
		},
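		// Heredocs: operator, delimiter word, body, and end marker.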
		{ // 205
			"<<",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 206
			"<<a",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 207
			"<<a\\n\\tc\n123\na\n\tc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "a\\n\\tc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "123\n"},
				{Type: TokenHeredocEnd, Data: "a\n\tc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 208
			"<<abc\n123",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 209
			"<<abc\n123$\nabc",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "<<"},
				{Type: TokenWord, Data: "abc"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenHeredoc, Data: "123$\n"},
				{Type: TokenHeredocEnd, Data: "abc"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
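		// Parameter expansion operators inside ${ ... }.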
		{ // 210
			"${a!}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: parser.TokenError, Data: "invalid parameter expansion"},
			},
		},
		{ // 211
			"${a:b}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: ":"},
				{Type: parser.TokenError, Data: "invalid parameter expansion"},
			},
		},
		{ // 212
			"${a:1:b}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: ":"},
				{Type: TokenNumberLiteral, Data: "1"},
				{Type: TokenPunctuator, Data: ":"},
				{Type: parser.TokenError, Data: "invalid parameter expansion"},
			},
		},
		{ // 213
			"${a/(}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 214
			"${a/",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 215
			"${a/)}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 216
			"${a/b[\\t]+/c}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: TokenPattern, Data: "b[\\t]+"},
				{Type: TokenPunctuator, Data: "/"},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 217
			"${a-b}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "-"},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 218
			"${a+b}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "+"},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 219
			"${a=b}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 220
			"${a?b}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "${"},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "?"},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
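		// Number literals with explicit bases inside arithmetic expansion.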
		{ // 221
			"$(( 0x\"1\" ))",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "$(("},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "0x"},
				{Type: TokenString, Data: "\"1\""},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "))"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 222
			"$(( 2#, ))",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "$(("},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid number"},
			},
		},
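		// Contextually invalid keywords and identifiers, including after a function declaration.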
		{ // 223
			"function a time",
			[]parser.Token{
				{Type: TokenKeyword, Data: "function"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenFunctionIdentifier, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid keyword"},
			},
		},
		{ // 224
			"then",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid keyword"},
			},
		},
		{ // 225
			"in",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid keyword"},
			},
		},
		{ // 226
			"do",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid keyword"},
			},
		},
		{ // 227
			"elif",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid keyword"},
			},
		},
		{ // 228
			"else",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid keyword"},
			},
		},
		{ // 229
			"fi",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid keyword"},
			},
		},
		{ // 230
			"done",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid keyword"},
			},
		},
		{ // 231
			"esac",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid keyword"},
			},
		},
		{ // 232
			"function a coproc",
			[]parser.Token{
				{Type: TokenKeyword, Data: "function"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenFunctionIdentifier, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid keyword"},
			},
		},
		{ // 233
			"function a function",
			[]parser.Token{
				{Type: TokenKeyword, Data: "function"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenFunctionIdentifier, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid keyword"},
			},
		},
		{ // 234
			"function a(\n) { echo b; }",
			[]parser.Token{
				{Type: TokenKeyword, Data: "function"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenFunctionIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "("},
				{Type: parser.TokenError, Data: "missing closing paren"},
			},
		},
		{ // 235
			"function (\n) { echo b; }",
			[]parser.Token{
				{Type: TokenKeyword, Data: "function"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid identifier"},
			},
		},
		{ // 236
			"function a()",
			[]parser.Token{
				{Type: TokenKeyword, Data: "function"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenFunctionIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenPunctuator, Data: ")"},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 237
			"select %; do b; done",
			[]parser.Token{
				{Type: TokenKeyword, Data: "select"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid identifier"},
			},
		},
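		// Declaration builtins (declare, local, typeset, readonly, export) with options and assignments.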
		{ // 238
			"declare a=b",
			[]parser.Token{
				{Type: TokenBuiltin, Data: "declare"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWord, Data: "b"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 239
			"local -a a=b c=d",
			[]parser.Token{
				{Type: TokenBuiltin, Data: "local"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenOperator, Data: "-a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifierAssign, Data: "c"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWord, Data: "d"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 240
			"typeset -aFx a=b",
			[]parser.Token{
				{Type: TokenBuiltin, Data: "typeset"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenOperator, Data: "-aFx"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWord, Data: "b"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 241
			"readonly -A -p -f a=b",
			[]parser.Token{
				{Type: TokenBuiltin, Data: "readonly"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenOperator, Data: "-A"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenOperator, Data: "-p"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenOperator, Data: "-f"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWord, Data: "b"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 242
			"export -n -1 a",
			[]parser.Token{
				{Type: TokenBuiltin, Data: "export"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenOperator, Data: "-n"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
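		// The let builtin and its arithmetic assignment expressions.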
		{ // 243
			"let a=1",
			[]parser.Token{
				{Type: TokenBuiltin, Data: "let"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenLetIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenNumberLiteral, Data: "1"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 244
			"let a=(1)",
			[]parser.Token{
				{Type: TokenBuiltin, Data: "let"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenLetIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenNumberLiteral, Data: "1"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 245
			"let a=( 1 );",
			[]parser.Token{
				{Type: TokenBuiltin, Data: "let"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenLetIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumberLiteral, Data: "1"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 246
			"let a=1+2 b=2*3",
			[]parser.Token{
				{Type: TokenBuiltin, Data: "let"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenLetIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenNumberLiteral, Data: "1"},
				{Type: TokenPunctuator, Data: "+"},
				{Type: TokenNumberLiteral, Data: "2"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenLetIdentifierAssign, Data: "b"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenNumberLiteral, Data: "2"},
				{Type: TokenPunctuator, Data: "*"},
				{Type: TokenNumberLiteral, Data: "3"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 247
			"let a=b?c:d",
			[]parser.Token{
				{Type: TokenBuiltin, Data: "let"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenLetIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: "?"},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: ":"},
				{Type: TokenWord, Data: "d"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 248
			"let a=b ? c : d",
			[]parser.Token{
				{Type: TokenBuiltin, Data: "let"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenLetIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "?"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "c"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: ":"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "d"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 249
			"let a=( b ? c : d )",
			[]parser.Token{
				{Type: TokenBuiltin, Data: "let"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenLetIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "?"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "c"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: ":"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "d"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: ")"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 250
			"let a={b",
			[]parser.Token{
				{Type: TokenBuiltin, Data: "let"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenLetIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 251
			"let a={b..c}",
			[]parser.Token{
				{Type: TokenBuiltin, Data: "let"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenLetIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenBraceExpansion, Data: "{b..c}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 252
			"let a=(b + c{d..e})",
			[]parser.Token{
				{Type: TokenBuiltin, Data: "let"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenLetIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "+"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "c"},
				{Type: TokenBraceExpansion, Data: "{d..e}"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
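		// Array literals: the parenthesised value of an assignment is split
		// into words, with [key]=value elements kept whole.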
		{ // 253
			"a=()",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenPunctuator, Data: ")"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 254
			"a=(b c)",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 255
			"a=([b]=c [d]=e)",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenWord, Data: "[b]=c"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "[d]=e"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 256
			"a[ b]=1 c",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWord, Data: "1"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "c"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 257
			"a b[ c]=1",
			[]parser.Token{
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "c]=1"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
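		// Comments are recognised inside subshells and brace groups, but not
		// inside arithmetic (( )) expressions.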
		{ // 258
			"( #comment\n)",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenComment, Data: "#comment"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 259
			"{ #comment\n}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenComment, Data: "#comment"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 260
			"(( #comment\n))",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "(("},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
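		// A subscript opened where an assignment could start is treated
		// tentatively as one, so EOF inside the brackets is an error.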
		{ // 261
			"a[",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 262
			"a[b]c[",
			[]parser.Token{
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: TokenWord, Data: "c["},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 263
			"a[b]=c d[",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWord, Data: "c"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifierAssign, Data: "d"},
				{Type: TokenPunctuator, Data: "["},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 264
			"a[$b+1]=c",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenIdentifier, Data: "$b"},
				{Type: TokenPunctuator, Data: "+"},
				{Type: TokenNumberLiteral, Data: "1"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWord, Data: "c"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 265
			"a[b]=( 1 )",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: TokenPunctuator, Data: "="},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
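		// Without a trailing = the brackets are not an assignment subscript,
		// so the spaces and newlines inside them come through as words.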
		{ // 266
			"a[ b ]",
			[]parser.Token{
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWord, Data: " "},
				{Type: TokenPunctuator, Data: "]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 267
			"a[\nb\n]",
			[]parser.Token{
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "\n"},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWord, Data: "\n"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 268
			"a[ b ][ c ]",
			[]parser.Token{
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWord, Data: " "},
				{Type: TokenPunctuator, Data: "]"},
				{Type: TokenWord, Data: "["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "c"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
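		// In argument position the brackets stay part of the word unless the
		// closing bracket is immediately followed by =.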
		{ // 269
			"a b[",
			[]parser.Token{
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b["},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 270
			"a[b]",
			[]parser.Token{
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 271
			"a[b;]",
			[]parser.Token{
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "b;"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 272
			"a[b;]=",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "b"},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 273
			"a[b{]",
			[]parser.Token{
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "b{"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 274
			"a[b{]=",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "b"},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 275
			"a[b}]",
			[]parser.Token{
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 276
			"a[b}]=",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "b"},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 277
			"a[b#]",
			[]parser.Token{
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "b#"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 278
			"a[b #]",
			[]parser.Token{
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWord, Data: " "},
				{Type: TokenWord, Data: "#"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 279
			"a[b #]=",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 280
			"a[b #]+",
			[]parser.Token{
				{Type: TokenWord, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWord, Data: " "},
				{Type: TokenWord, Data: "#"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: TokenWord, Data: "+"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 281
			"a[b #]+=",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "b"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 282
			"a b[c]",
			[]parser.Token{
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b[c]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 283
			"a b[c ]",
			[]parser.Token{
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b[c"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 284
			"a b[c]=1",
			[]parser.Token{
				{Type: TokenWord, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifierAssign, Data: "b"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenWord, Data: "c"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWord, Data: "1"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
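		// An assignment value may be empty, but (( )) is not a valid value.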
		{ // 285
			"a=(( 1 ))",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: parser.TokenError, Data: "invalid character"},
			},
		},
		{ // 286
			"a=",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 287
			"a=;",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenPunctuator, Data: ";"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 288
			"a= b",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 289
			"a[$(b)]=",
			[]parser.Token{
				{Type: TokenIdentifierAssign, Data: "a"},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenPunctuator, Data: "$("},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: TokenPunctuator, Data: "="},
				{Type: parser.TokenDone, Data: ""},
			},
		},
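		// Function definitions nest inside brace groups, and a bare [ command
		// is tokenised as an ordinary word.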
		{ // 290
			"{ function a() { b; } }",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "function"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenFunctionIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "b"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "}"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 291
			"[ \"$a\" = \"b\" ]",
			[]parser.Token{
				{Type: TokenWord, Data: "["},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenStringStart, Data: "\""},
				{Type: TokenIdentifier, Data: "$a"},
				{Type: TokenStringEnd, Data: "\""},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenString, Data: "\"b\""},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenWord, Data: "]"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
	} {
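		// Feed each input through a fresh tokeniser and compare the tokens it
		// produces, in order, against the expected stream.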
		p := parser.NewStringTokeniser(test.Input)

		SetTokeniser(&p)

		for m, tkn := range test.Output {
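			// Stop at the first mismatched token so one failure does not
			// cascade through the rest of the stream.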
			if tk, _ := p.GetToken(); tk.Type != tkn.Type {
				if tk.Type == parser.TokenError {
					t.Errorf("test %d.%d: unexpected error: %s", n+1, m+1, tk.Data)
				} else {
					t.Errorf("test %d.%d: Incorrect type, expecting %d, got %d", n+1, m+1, tkn.Type, tk.Type)
				}

				break
			} else if tk.Data != tkn.Data {
				t.Errorf("test %d.%d: Incorrect data, expecting %q, got %q", n+1, m+1, tkn.Data, tk.Data)

				break
			}
		}
	}
}