package javascript

import (
	"testing"

	"vimagination.zapto.org/parser"
)

// TestTokeniser runs the JavaScript tokeniser over a table of source
// snippets and checks the exact sequence of tokens (type and data)
// produced for each. Cases cover whitespace, string/template literals
// and their escape sequences, numeric literals (decimal, binary, octal,
// hex, BigInt, numeric separators), keywords, identifiers (including
// unicode escapes and private #names), regular expressions, punctuators,
// comments, and a range of error conditions.
//
// NOTE(review): this file appears to have been re-flowed from a
// whitespace-mangled copy; runs of multiple spaces inside string
// literals (e.g. cases 3, 4 and 56) may originally have been longer —
// confirm against the canonical source. Input and expected-token data
// are reproduced consistently with each other either way.
func TestTokeniser(t *testing.T) {
	for n, test := range [...]struct {
		Input  string
		Output []parser.Token
	}{
		{ // 1
			"",
			[]parser.Token{
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 2
			" ",
			[]parser.Token{
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 3
			" ",
			[]parser.Token{
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 4
			" \n ",
			[]parser.Token{
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWhitespace, Data: " "},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 5
			"\"\"",
			[]parser.Token{
				{Type: TokenStringLiteral, Data: "\"\""},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 6
			"\"\\\"\"",
			[]parser.Token{
				{Type: TokenStringLiteral, Data: "\"\\\"\""},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 7
			"\"\n\"",
			[]parser.Token{
				{Type: parser.TokenError, Data: "line terminator in string: \"\n"},
			},
		},
		{ // 8
			"\"\\n\"",
			[]parser.Token{
				{Type: TokenStringLiteral, Data: "\"\\n\""},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 9
			"\"\\0\"",
			[]parser.Token{
				{Type: TokenStringLiteral, Data: "\"\\0\""},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 10
			"\"\\x20\"",
			[]parser.Token{
				{Type: TokenStringLiteral, Data: "\"\\x20\""},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 11
			"\"\\u2020\"",
			[]parser.Token{
				{Type: TokenStringLiteral, Data: "\"\\u2020\""},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 12
			"\"\\u\"",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid escape sequence: \"\\u\""},
			},
		},
		{ // 13
			"\"\\up\"",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid escape sequence: \"\\up"},
			},
		},
		{ // 14
			"\"\\u{20}\"",
			[]parser.Token{
				{Type: TokenStringLiteral, Data: "\"\\u{20}\""},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 15
			"\"use strict\"",
			[]parser.Token{
				{Type: TokenStringLiteral, Data: "\"use strict\""},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 16
			"\"use\\u{20}strict\\x65!\\0\"",
			[]parser.Token{
				{Type: TokenStringLiteral, Data: "\"use\\u{20}strict\\x65!\\0\""},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 17
			"\"use strict\";",
			[]parser.Token{
				{Type: TokenStringLiteral, Data: "\"use strict\""},
				{Type: TokenPunctuator, Data: ";"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 18
			"0",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 19
			"0.1",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0.1"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 20
			".1",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: ".1"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 21
			"0b0",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0b0"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 22
			"0b1",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0b1"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 23
			"0b1001010101",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0b1001010101"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 24
			"0",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 25
			"1",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "1"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 26
			"9",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "9"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 27
			"12345678901",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "12345678901"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 28
			"12345678.901",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "12345678.901"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 29
			"12345678901E123",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "12345678901E123"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 30
			"12345678901e+123",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "12345678901e+123"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 31
			"12345678.901E-123",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "12345678.901E-123"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 32
			"0x0",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0x0"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 33
			"0xa",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0xa"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 34
			"0xf",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0xf"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 35
			"0x0f",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0x0f"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 36
			"0xaf",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0xaf"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 37
			"0xDeAdBeEf",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0xDeAdBeEf"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 38
			"0n",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0n"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 39
			"1n",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "1n"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 40
			"1234567890n",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "1234567890n"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 41
			"0x1234567890n",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0x1234567890n"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 42
			"Infinity",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "Infinity"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 43
			"true",
			[]parser.Token{
				{Type: TokenBooleanLiteral, Data: "true"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 44
			"false",
			[]parser.Token{
				{Type: TokenBooleanLiteral, Data: "false"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 45
			"hello",
			[]parser.Token{
				{Type: TokenIdentifier, Data: "hello"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 46
			"this",
			[]parser.Token{
				{Type: TokenKeyword, Data: "this"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 47
			"function",
			[]parser.Token{
				{Type: TokenKeyword, Data: "function"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 48
			"/[a-z]+/g",
			[]parser.Token{
				{Type: TokenRegularExpressionLiteral, Data: "/[a-z]+/g"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 49
			"/[\\n]/g",
			[]parser.Token{
				{Type: TokenRegularExpressionLiteral, Data: "/[\\n]/g"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 50
			"var a = /^ab[cd]*$/ig;",
			[]parser.Token{
				{Type: TokenKeyword, Data: "var"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenRegularExpressionLiteral, Data: "/^ab[cd]*$/ig"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 51
			"num /= 4",
			[]parser.Token{
				{Type: TokenIdentifier, Data: "num"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenDivPunctuator, Data: "/="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumericLiteral, Data: "4"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 52
			"const num = 8 / 4;",
			[]parser.Token{
				{Type: TokenKeyword, Data: "const"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "num"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumericLiteral, Data: "8"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenDivPunctuator, Data: "/"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumericLiteral, Data: "4"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 53
			"``",
			[]parser.Token{
				{Type: TokenNoSubstitutionTemplate, Data: "``"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 54
			"`abc`",
			[]parser.Token{
				{Type: TokenNoSubstitutionTemplate, Data: "`abc`"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 55
			"`ab${ (val.a / 2) + 1 }c${str}`",
			[]parser.Token{
				{Type: TokenTemplateHead, Data: "`ab${"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenIdentifier, Data: "val"},
				{Type: TokenPunctuator, Data: "."},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenDivPunctuator, Data: "/"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumericLiteral, Data: "2"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "+"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumericLiteral, Data: "1"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenTemplateMiddle, Data: "}c${"},
				{Type: TokenIdentifier, Data: "str"},
				{Type: TokenTemplateTail, Data: "}`"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 56
			"const myFunc = function(aye, bee, cea) {\n const num = [123, 4, lastNum(aye, \"beep\", () => window, val => val * 2, (myVar) => {myVar /= 2;return myVar;})], elm = document.getElementByID();\n console.log(bee, num, elm);}",
			[]parser.Token{
				{Type: TokenKeyword, Data: "const"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "myFunc"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "function"},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenIdentifier, Data: "aye"},
				{Type: TokenPunctuator, Data: ","},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "bee"},
				{Type: TokenPunctuator, Data: ","},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "cea"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "const"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "num"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "["},
				{Type: TokenNumericLiteral, Data: "123"},
				{Type: TokenPunctuator, Data: ","},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumericLiteral, Data: "4"},
				{Type: TokenPunctuator, Data: ","},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "lastNum"},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenIdentifier, Data: "aye"},
				{Type: TokenPunctuator, Data: ","},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenStringLiteral, Data: "\"beep\""},
				{Type: TokenPunctuator, Data: ","},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "=>"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "window"},
				{Type: TokenPunctuator, Data: ","},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "val"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "=>"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "val"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "*"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumericLiteral, Data: "2"},
				{Type: TokenPunctuator, Data: ","},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenIdentifier, Data: "myVar"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "=>"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenIdentifier, Data: "myVar"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenDivPunctuator, Data: "/="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumericLiteral, Data: "2"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenKeyword, Data: "return"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "myVar"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenRightBracePunctuator, Data: "}"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenPunctuator, Data: "]"},
				{Type: TokenPunctuator, Data: ","},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "elm"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "="},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "document"},
				{Type: TokenPunctuator, Data: "."},
				{Type: TokenIdentifier, Data: "getElementByID"},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenLineTerminator, Data: "\n"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "console"},
				{Type: TokenPunctuator, Data: "."},
				{Type: TokenIdentifier, Data: "log"},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenIdentifier, Data: "bee"},
				{Type: TokenPunctuator, Data: ","},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "num"},
				{Type: TokenPunctuator, Data: ","},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "elm"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: TokenRightBracePunctuator, Data: "}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 57
			"export {name1, name2};",
			[]parser.Token{
				{Type: TokenKeyword, Data: "export"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenIdentifier, Data: "name1"},
				{Type: TokenPunctuator, Data: ","},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "name2"},
				{Type: TokenRightBracePunctuator, Data: "}"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 58
			"export {var1 as name1, var2 as name2};",
			[]parser.Token{
				{Type: TokenKeyword, Data: "export"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "{"},
				{Type: TokenIdentifier, Data: "var1"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "as"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "name1"},
				{Type: TokenPunctuator, Data: ","},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "var2"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "as"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "name2"},
				{Type: TokenRightBracePunctuator, Data: "}"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 59
			"export * from './other.js';",
			[]parser.Token{
				{Type: TokenKeyword, Data: "export"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "*"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "from"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenStringLiteral, Data: "'./other.js'"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 60
			"import * as name from './module.js';",
			[]parser.Token{
				{Type: TokenKeyword, Data: "import"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "*"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "as"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "name"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenIdentifier, Data: "from"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenStringLiteral, Data: "'./module.js'"},
				{Type: TokenPunctuator, Data: ";"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 61
			"$",
			[]parser.Token{
				{Type: TokenIdentifier, Data: "$"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 62
			"_",
			[]parser.Token{
				{Type: TokenIdentifier, Data: "_"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 63
			"\\u0061",
			[]parser.Token{
				{Type: TokenIdentifier, Data: "\\u0061"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 64
			"// Comment",
			[]parser.Token{
				{Type: TokenSingleLineComment, Data: "// Comment"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 65
			"enum",
			[]parser.Token{
				{Type: TokenFutureReservedWord, Data: "enum"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 66
			".01234E56",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: ".01234E56"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 67
			"0.01234E56",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0.01234E56"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 68
			"0o1234567",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0o1234567"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 69
			"`\\x60`",
			[]parser.Token{
				{Type: TokenNoSubstitutionTemplate, Data: "`\\x60`"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 70
			"/\\(/",
			[]parser.Token{
				{Type: TokenRegularExpressionLiteral, Data: "/\\(/"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 71
			"/a\\(/",
			[]parser.Token{
				{Type: TokenRegularExpressionLiteral, Data: "/a\\(/"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 72
			"{",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "{"},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 73
			"[",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "["},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 74
			"(",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "("},
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 75
			"/*",
			[]parser.Token{
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 76
			"[}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "["},
				{Type: parser.TokenError, Data: "invalid character: }"},
			},
		},
		{ // 77
			"(}",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "("},
				{Type: parser.TokenError, Data: "invalid character: }"},
			},
		},
		{ // 78
			"{)",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "{"},
				{Type: parser.TokenError, Data: "invalid character: )"},
			},
		},
		{ // 79
			"{]",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "{"},
				{Type: parser.TokenError, Data: "invalid character: ]"},
			},
		},
		{ // 80
			"(]",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "("},
				{Type: parser.TokenError, Data: "invalid character: ]"},
			},
		},
		{ // 81
			"[)",
			[]parser.Token{
				{Type: TokenPunctuator, Data: "["},
				{Type: parser.TokenError, Data: "invalid character: )"},
			},
		},
		{ // 82
			"..",
			[]parser.Token{
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 83
			"..a",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid character sequence: ..a"},
			},
		},
		{ // 84
			"/\\\n/",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid regexp sequence: /\\\n"},
			},
		},
		{ // 85
			"/[",
			[]parser.Token{
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 86
			"/[\\",
			[]parser.Token{
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 87
			"/",
			[]parser.Token{
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 88
			"/\n",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid regexp character: \n"},
			},
		},
		{ // 89
			"/a",
			[]parser.Token{
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 90
			"/a\\\n/",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid regexp sequence: /a\\\n"},
			},
		},
		{ // 91
			"/a[",
			[]parser.Token{
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 92
			"/a\n",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid regexp character: \n"},
			},
		},
		{ // 93
			"0B9",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid number: 0B9"},
			},
		},
		{ // 94
			"0O9",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid number: 0O9"},
			},
		},
		{ // 95
			"0XG",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid number: 0XG"},
			},
		},
		{ // 96
			"\\x60",
			[]parser.Token{
				{Type: parser.TokenError, Data: "unexpected backslash: \\x"},
			},
		},
		{ // 97
			"\\ug",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid unicode escape sequence: \\ug"},
			},
		},
		{ // 98
			"\\u{G}",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid unicode escape sequence: \\u{G"},
			},
		},
		{ // 99
			"\\u{ffff",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid unicode escape sequence: \\u{ffff"},
			},
		},
		{ // 100
			"}",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid character: }"},
			},
		},
		{ // 101
			"`\\G`",
			[]parser.Token{
				{Type: TokenNoSubstitutionTemplate, Data: "`\\G`"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 102
			"`",
			[]parser.Token{
				{Type: parser.TokenError, Data: "unexpected EOF"},
			},
		},
		{ // 103
			"1_234_567",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "1_234_567"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 104
			"1_",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid number: 1_"},
			},
		},
		{ // 105
			"1__234_567",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid number: 1__"},
			},
		},
		{ // 106
			"123e-456_789",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "123e-456_789"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 107
			"0.123_456",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0.123_456"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 108
			"1.2_3_4_5_6_7",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "1.2_3_4_5_6_7"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 109
			"0x1_2",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0x1_2"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 110
			"0b1_0",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0b1_0"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 111
			"0o1_7",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: "0o1_7"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 112
			"a.b",
			[]parser.Token{
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "."},
				{Type: TokenIdentifier, Data: "b"},
			},
		},
		{ // 113
			"a?.b",
			[]parser.Token{
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "?."},
				{Type: TokenIdentifier, Data: "b"},
			},
		},
		{ // 114
			"a??b",
			[]parser.Token{
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "??"},
				{Type: TokenIdentifier, Data: "b"},
			},
		},
		{ // 115
			"0.",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid number: 0."},
			},
		},
		{ // 116
			"0.1e",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid number: 0.1e"},
			},
		},
		{ // 117
			"1.",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid number: 1."},
			},
		},
		{ // 118
			"1.1e",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid number: 1.1e"},
			},
		},
		{ // 119
			"import(a)",
			[]parser.Token{
				{Type: TokenKeyword, Data: "import"},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: ")"},
			},
		},
		{ // 120
			"\\u0060",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid unicode escape sequence: \\u0060"},
			},
		},
		{ // 121
			"\\u0024",
			[]parser.Token{
				{Type: TokenIdentifier, Data: "\\u0024"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 122
			"\\u{5f}",
			[]parser.Token{
				{Type: TokenIdentifier, Data: "\\u{5f}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 123
			"\\u{41}",
			[]parser.Token{
				{Type: TokenIdentifier, Data: "\\u{41}"},
				{Type: parser.TokenDone, Data: ""},
			},
		},
		{ // 124
			"\\u{0}",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid unicode escape sequence: \\u{0}"},
			},
		},
		{ // 125
			"\\u005C",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid unicode escape sequence: \\u005C"},
			},
		},
		{ // 126
			"/a/g",
			[]parser.Token{
				{Type: TokenRegularExpressionLiteral, Data: "/a/g"},
			},
		},
		{ // 127
			"/a/\\u000A",
			[]parser.Token{
				{Type: TokenRegularExpressionLiteral, Data: "/a/"},
			},
		},
		{ // 128
			"a`b${f}c`",
			[]parser.Token{
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenTemplateHead, Data: "`b${"},
				{Type: TokenIdentifier, Data: "f"},
				{Type: TokenTemplateTail, Data: "}c`"},
			},
		},
		{ // 129
			"#a",
			[]parser.Token{
				{Type: TokenPrivateIdentifier, Data: "#a"},
			},
		},
		{ // 130
			"a.#b",
			[]parser.Token{
				{Type: TokenIdentifier, Data: "a"},
				{Type: TokenPunctuator, Data: "."},
				{Type: TokenPrivateIdentifier, Data: "#b"},
			},
		},
		{ // 131
			"#",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid character sequence: #"},
			},
		},
		{ // 132
			"#.a",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid character sequence: #."},
			},
		},
		{ // 133
			"Number(10000n * this.#numerator / this.#denominator) / 10000;",
			[]parser.Token{
				{Type: TokenIdentifier, Data: "Number"},
				{Type: TokenPunctuator, Data: "("},
				{Type: TokenNumericLiteral, Data: "10000n"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenPunctuator, Data: "*"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "this"},
				{Type: TokenPunctuator, Data: "."},
				{Type: TokenPrivateIdentifier, Data: "#numerator"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenDivPunctuator, Data: "/"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenKeyword, Data: "this"},
				{Type: TokenPunctuator, Data: "."},
				{Type: TokenPrivateIdentifier, Data: "#denominator"},
				{Type: TokenPunctuator, Data: ")"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenDivPunctuator, Data: "/"},
				{Type: TokenWhitespace, Data: " "},
				{Type: TokenNumericLiteral, Data: "10000"},
				{Type: TokenPunctuator, Data: ";"},
			},
		},
		{ // 134
			"`\\x`",
			[]parser.Token{
				{Type: parser.TokenError, Data: "invalid escape sequence: `\\x`"},
			},
		},
		{ // 135
			".123_456",
			[]parser.Token{
				{Type: TokenNumericLiteral, Data: ".123_456"},
			},
		},
	} {
		// Build a fresh tokeniser over the raw input and install the
		// JavaScript token functions on it.
		p := parser.NewStringTokeniser(test.Input)
		SetTokeniser(&p)

		// Compare the emitted token stream against the expected one,
		// stopping this case at the first mismatch. Cases that end in
		// parser.TokenError or omit parser.TokenDone intentionally
		// check only a prefix of the stream.
		for m, tkn := range test.Output {
			tk, _ := p.GetToken()
			if tk.Type != tkn.Type {
				if tk.Type == parser.TokenError {
					t.Errorf("test %d.%d: unexpected error: %s", n+1, m+1, tk.Data)
				} else {
					t.Errorf("test %d.%d: Incorrect type, expecting %d, got %d", n+1, m+1, tkn.Type, tk.Type)
				}
				break
			} else if tk.Data != tkn.Data {
				t.Errorf("test %d.%d: Incorrect data, expecting %q, got %q", n+1, m+1, tkn.Data, tk.Data)
				break
			}
		}
	}
}