tokenizer.go
package pagser

import (
	"errors"
	"fmt"
	"regexp"
	"strings"
)
// tokenState tracks the tokenizer state while scanning function parameters.
type tokenState int

const (
	tokenInput tokenState = iota
	tokenStartQuote
	tokenEndQuote
	tokenComma
)
// rxFunc matches the function part of a tag value, e.g.:
//	->fn()
//	->fn(xxx)
//	->fn('xxx')
//	->fn('xxx\'xxx', 'xxx,xxx')
var rxFunc = regexp.MustCompile(`^\s*([a-zA-Z]+)\s*(\(([^\)]*)\))?\s*$`)
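// Illustrative note (not in the original source): for a function part such as
// "attr('href')" the pattern above yields matches[1] == "attr" and
// matches[3] == "'href'"; for "text()" it yields matches[1] == "text" and an
// empty matches[3].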
// tagTokenizer holds the parsed parts of a struct tag value:
// the CSS selector, the function name and its parameters.
type tagTokenizer struct {
	Selector   string
	FuncName   string
	FuncParams []string
}
// newTag parses a struct tag value of the form "selector->fn(params)" into a
// tagTokenizer. An empty tag value yields an empty tagTokenizer.
func (p *Pagser) newTag(tagValue string) (*tagTokenizer, error) {
	tag := &tagTokenizer{}
	if tagValue == "" {
		return tag, nil
	}
	// Split the tag into the selector part and the optional function part.
	selectors := strings.Split(tagValue, p.Config.FuncSymbol)
	funcValue := ""
	for i := 0; i < len(selectors); i++ {
		switch i {
		case 0:
			tag.Selector = strings.TrimSpace(selectors[i])
		case 1:
			funcValue = selectors[i]
		}
	}
	matches := rxFunc.FindStringSubmatch(funcValue)
	if len(matches) < 3 {
		return tag, nil
	}
	tag.FuncName = strings.TrimSpace(matches[1])
	params, err := parseFuncParamTokens(matches[3])
	if err != nil {
		return nil, fmt.Errorf("tag=`%v` is invalid: %v", tagValue, err)
	}
	tag.FuncParams = params
	if p.Config.Debug {
		fmt.Printf("----- debug -----\n`%v`\n%v\n", tagValue, prettyJson(tag))
	}
	return tag, nil
}
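// Usage sketch (illustrative, assuming Config.FuncSymbol is "->" as in the
// examples above): a tag value such as "a->attr('href')" would parse to
// Selector "a", FuncName "attr" and FuncParams []string{"href"}.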
// parseFuncParamTokens splits a function parameter list into individual
// tokens. Parameters may be bare or single-quoted; inside quotes a comma is
// literal and \' escapes a quote. It returns an error if a quote is left
// unclosed.
func parseFuncParamTokens(text string) ([]string, error) {
	tokens := make([]string, 0)
	textLen := len(text)
	token := strings.Builder{}
	var currentState tokenState
	for pos := 0; pos < textLen; pos++ {
		ch := rune(text[pos])
		switch ch {
		case '\'':
			// Closing quote: emit the quoted token.
			if currentState == tokenStartQuote {
				tokens = append(tokens, token.String())
				token.Reset()
				currentState = tokenEndQuote
				continue
			}
			// Opening quote: only if no bare token has started.
			if token.Len() <= 0 {
				currentState = tokenStartQuote
				continue
			}
		case ',':
			// Inside quotes a comma is part of the token.
			if currentState == tokenStartQuote {
				token.WriteRune(ch)
				continue
			}
			// Otherwise a comma terminates the current token.
			if currentState == tokenComma || token.Len() > 0 {
				tokens = append(tokens, token.String())
				token.Reset()
				currentState = tokenComma
			}
			continue
		case '\\':
			// Escape sequence \' inside quotes.
			if currentState == tokenStartQuote && pos+1 < textLen {
				nextCh := rune(text[pos+1])
				if nextCh == '\'' {
					token.WriteRune(nextCh)
					pos += 1
					continue
				}
			}
		case ' ':
			// Skip leading whitespace outside of quotes.
			if (currentState != tokenStartQuote) && token.Len() <= 0 {
				continue
			}
		}
		token.WriteRune(ch)
	}
	if currentState == tokenStartQuote {
		return []string{}, errors.New("syntax error, single quote not closed")
	}
	// Flush the trailing token, if any.
	if token.Len() > 0 {
		tokens = append(tokens, token.String())
		token.Reset()
	}
	return tokens, nil
}
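// Illustrative sketch (not part of the original file): expected behaviour of
// parseFuncParamTokens for the quoted, escaped forms documented above.
//
//	params, _ := parseFuncParamTokens(`'xxx\'xxx', 'xxx,xxx'`)
//	// params == []string{"xxx'xxx", "xxx,xxx"}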