match - tokeniser_test.go
package match

import (
	"testing"

	"vimagination.zapto.org/parser"
)
// TestTokeniser runs each tokeniser state function over an input string and
// checks that it emits exactly the expected sequence of tokens.
func TestTokeniser(t *testing.T) {
	for n, test := range [...]struct {
		Input        string
		Output       []parser.Token
		InitialState parser.TokenFunc
	}{
		{ // 1
			"",
			[]parser.Token{
				{Type: tokenStart, Data: ""},
				{Type: tokenEnd, Data: ""},
				{Type: parser.TokenDone, Data: ""},
			},
			simpleStart,
		},
		{ // 2
			"a",
			[]parser.Token{
				{Type: tokenStart, Data: ""},
				{Type: tokenChar, Data: "a"},
				{Type: tokenEnd, Data: ""},
				{Type: parser.TokenDone, Data: ""},
			},
			simpleStart,
		},
		{ // 3
			"abc",
			[]parser.Token{
				{Type: tokenStart, Data: ""},
				{Type: tokenChar, Data: "a"},
				{Type: tokenChar, Data: "b"},
				{Type: tokenChar, Data: "c"},
				{Type: tokenEnd, Data: ""},
				{Type: parser.TokenDone, Data: ""},
			},
			simpleStart,
		},
		{ // 4
			"",
			[]parser.Token{
				{Type: tokenStart, Data: ""},
				{Type: tokenEnd, Data: ""},
				{Type: parser.TokenDone, Data: ""},
			},
			partialStringStart,
		},
		{ // 5
			"a",
			[]parser.Token{
				{Type: tokenStart, Data: ""},
				{Type: tokenChar, Data: "a"},
				{Type: tokenEnd, Data: ""},
				{Type: parser.TokenDone, Data: ""},
			},
			partialStringStart,
		},
		{ // 6
			"abc",
			[]parser.Token{
				{Type: tokenStart, Data: ""},
				{Type: tokenChar, Data: "a"},
				{Type: tokenChar, Data: "b"},
				{Type: tokenChar, Data: "c"},
				{Type: tokenEnd, Data: ""},
				{Type: parser.TokenDone, Data: ""},
			},
			partialStringStart,
		},
		{ // 7
			"*",
			[]parser.Token{
				{Type: tokenStart, Data: ""},
				{Type: tokenAnyChar, Data: ""},
				{Type: tokenRepeat, Data: "*"},
				{Type: tokenEnd, Data: ""},
				{Type: parser.TokenDone, Data: ""},
			},
			partialStringStart,
		},
		{ // 8
			"*abc",
			[]parser.Token{
				{Type: tokenStart, Data: ""},
				{Type: tokenAnyChar, Data: ""},
				{Type: tokenRepeat, Data: "*"},
				{Type: tokenChar, Data: "a"},
				{Type: tokenChar, Data: "b"},
				{Type: tokenChar, Data: "c"},
				{Type: tokenEnd, Data: ""},
				{Type: parser.TokenDone, Data: ""},
			},
			partialStringStart,
		},
		{ // 9
			"*abc*",
			[]parser.Token{
				{Type: tokenStart, Data: ""},
				{Type: tokenAnyChar, Data: ""},
				{Type: tokenRepeat, Data: "*"},
				{Type: tokenChar, Data: "a"},
				{Type: tokenChar, Data: "b"},
				{Type: tokenChar, Data: "c"},
				{Type: tokenAnyChar, Data: ""},
				{Type: tokenRepeat, Data: "*"},
				{Type: tokenEnd, Data: ""},
				{Type: parser.TokenDone, Data: ""},
			},
			partialStringStart,
		},
	} {
		p := parser.NewStringTokeniser(test.Input)

		p.TokeniserState(test.InitialState)

		// Walk the expected token stream, stopping at the first mismatch;
		// on a tokenising error, tk.Data carries the error message.
		for m, tkn := range test.Output {
			if tk, _ := p.GetToken(); tk.Type != tkn.Type {
				if tk.Type == parser.TokenError {
					t.Errorf("test %d.%d: unexpected error: %s", n+1, m+1, tk.Data)
				} else {
					t.Errorf("test %d.%d: incorrect type, expecting %d, got %d", n+1, m+1, tkn.Type, tk.Type)
				}

				break
			} else if tk.Data != tkn.Data {
				t.Errorf("test %d.%d: incorrect data, expecting %q, got %q", n+1, m+1, tkn.Data, tk.Data)

				break
			}
		}
	}
}
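The state functions exercised above, simpleStart and partialStringStart, along with the token type constants tokenStart, tokenChar, tokenAnyChar, tokenRepeat and tokenEnd, live in the package's non-test files and are not shown here. For orientation only, here is a minimal sketch of the shape a simpleStart-style state could take; the sketch* names are invented for illustration, and the Tokeniser helper behaviour (Peek returning -1 once input is exhausted, Get returning the text read since the last call) is an assumption about vimagination.zapto.org/parser, not the package's actual implementation.

// Hypothetical sketch only, not the package's real simpleStart.
func sketchStart(t *parser.Tokeniser) (parser.Token, parser.TokenFunc) {
	// Emit the zero-width start-of-input marker, then move to the
	// per-character state.
	return parser.Token{Type: tokenStart}, sketchChar
}

func sketchChar(t *parser.Tokeniser) (parser.Token, parser.TokenFunc) {
	if t.Peek() == -1 { // assumed: Peek returns -1 at end of input
		// Emit the zero-width end-of-input marker, then finish.
		return parser.Token{Type: tokenEnd}, sketchDone
	}

	t.Next() // consume a single rune

	// Assumed: Get returns (and resets) everything read since the last call.
	return parser.Token{Type: tokenChar, Data: t.Get()}, sketchChar
}

func sketchDone(t *parser.Tokeniser) (parser.Token, parser.TokenFunc) {
	// Terminal state: keep reporting TokenDone, which is the final token
	// each test case above expects.
	return parser.Token{Type: parser.TokenDone}, sketchDone
}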