-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathtoken.go
72 lines (64 loc) · 1.67 KB
/
token.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
package wordcase
import (
"strings"
)
// Tokens is a slice of string tokens produced by splitting input at
// boundaries identified by IsRuneSeparator functions.
type Tokens []string

// String joins all tokens with a single space and returns the result.
func (t Tokens) String() string {
	return strings.Join([]string(t), " ")
}
// Format applies fn to the tokens at the positions chosen by items,
// leaving every other token unchanged, and returns the resulting set.
func (t Tokens) Format(fn Formatter, items TokenSelector) Tokens {
	// Build a set of the selected indexes for O(1) membership checks.
	selected := make(map[int]struct{}, len(t))
	for _, i := range items(t) {
		selected[i] = struct{}{}
	}
	var out Tokens
	for i, tok := range t {
		if _, ok := selected[i]; ok {
			tok = fn(tok)
		}
		out = append(out, tok)
	}
	return out
}
// FormatAll applies fn to every token and returns the resulting set.
//
// It is equivalent to t.Format(fn, ToAll(t)), just faster and less verbose.
func (t Tokens) FormatAll(fn Formatter) Tokens {
	// Preserve the original nil result for an empty receiver.
	if len(t) == 0 {
		return nil
	}
	out := make(Tokens, len(t))
	for i, tok := range t {
		out[i] = fn(tok)
	}
	return out
}
// Tokenize re-tokenizes each token with the supplied separator rules and
// returns the flattened result as a new token set.
func (t Tokens) Tokenize(test SeparatorTest, isSeparator IsRuneSeparator, rmSep bool) Tokens {
	var expanded Tokens
	for i := range t {
		expanded = append(expanded, TokenizeString(t[i], test, isSeparator, rmSep)...)
	}
	return expanded
}
// TokenizeString breaks a string into a set of tokens using the rules supplied
func TokenizeString(s string, test SeparatorTest, sepRune IsRuneSeparator, rmSep bool) Tokens {
var b strings.Builder
res := Tokens{}
r := []rune(s)
for i, n := range r {
isSep := test(s, i, sepRune)
if isSep && b.Len() > 0 {
res = append(res, b.String())
b.Reset()
}
if !rmSep || !sepRune(n) {
b.WriteRune(n)
}
}
if b.Len() > 0 {
res = append(res, b.String())
}
return res
}