refactor: change PeekingLexer to return internal pointers
alecthomas committed Nov 16, 2022
1 parent c854074 commit e748387
Showing 11 changed files with 43 additions and 49 deletions.
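At its core, this commit changes PeekingLexer's accessors to return pointers into the lexer's internal token slice rather than a fresh Token copy per call. A before/after sketch of the three signatures touched in lexer/peek.go (the motivation, presumably avoiding a struct copy on this hot path, is inferred rather than stated in the commit message):

// Before: each call returned a copy of the Token.
func (p *PeekingLexer) Next() Token
func (p *PeekingLexer) Peek() Token
func (p *PeekingLexer) RawPeek() Token

// After: each call returns a pointer into the PeekingLexer's internal token
// slice (or to its stored EOF token). Callers that retain or compare a token
// now dereference it first, as the updated tests below show.
func (p *PeekingLexer) Next() *Token
func (p *PeekingLexer) Peek() *Token
func (p *PeekingLexer) RawPeek() *Token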
2 changes: 1 addition & 1 deletion _examples/precedenceclimbing/main.go
@@ -103,7 +103,7 @@ func isOp(rn rune) bool {
 	return strings.ContainsRune("+-*/^", rn)
 }
 
-func peek(lex *lexer.PeekingLexer) lexer.Token {
+func peek(lex *lexer.PeekingLexer) *lexer.Token {
 	return lex.Peek()
 }

5 changes: 1 addition & 4 deletions go.mod
@@ -7,7 +7,4 @@ require (
 	github.com/alecthomas/repr v0.1.0
 )
 
-require (
-	github.com/hexops/gotextdiff v1.0.3 // indirect
-	github.com/mitchellh/mapstructure v1.5.0
-)
+require github.com/hexops/gotextdiff v1.0.3 // indirect
4 changes: 0 additions & 4 deletions go.sum
@@ -1,10 +1,6 @@
-github.com/alecthomas/assert/v2 v2.0.3 h1:WKqJODfOiQG0nEJKFKzDIG3E29CN2/4zR9XGJzKIkbg=
-github.com/alecthomas/assert/v2 v2.0.3/go.mod h1:b/+1DI2Q6NckYi+3mXyH3wFb8qG37K/DuK80n7WefXA=
 github.com/alecthomas/assert/v2 v2.1.0 h1:tbredtNcQnoSd3QBhQWI7QZ3XHOVkw1Moklp2ojoH/0=
 github.com/alecthomas/assert/v2 v2.1.0/go.mod h1:b/+1DI2Q6NckYi+3mXyH3wFb8qG37K/DuK80n7WefXA=
 github.com/alecthomas/repr v0.1.0 h1:ENn2e1+J3k09gyj2shc0dHr/yjaWSHRlrJ4DPMevDqE=
 github.com/alecthomas/repr v0.1.0/go.mod h1:2kn6fqh/zIyPLmm3ugklbEi5hg5wS435eygvNfaDQL8=
 github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
 github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
-github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
-github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
5 changes: 2 additions & 3 deletions lexer/internal/conformance/conformance_test.go
@@ -244,8 +244,7 @@ func TestLexerConformance(t *testing.T) {
 	testLexer(t, conformanceLexer)
 }
 
-func genLexer(t *testing.T) {
-	t.Helper()
+func genLexer(t *testing.T) { //nolint: thelper
 	lexerJSON, err := json.Marshal(conformanceLexer)
 	assert.NoError(t, err)
 	cwd, err := os.Getwd()
@@ -260,7 +259,7 @@ func genLexer(t *testing.T) {
 		"--tags", "generated",
 		"--name", "GeneratedConformance",
 		"--output", generatedConformanceLexer)
-	cmd.Stdout = os.Stdout
+	cmd.Stdout = os.Stderr
 	cmd.Stderr = os.Stderr
 	w, err := cmd.StdinPipe()
 	assert.NoError(t, err)
18 changes: 9 additions & 9 deletions lexer/peek.go
@@ -52,29 +52,29 @@ func (p *PeekingLexer) RawCursor() RawCursor {
 }
 
 // Next consumes and returns the next token.
-func (p *PeekingLexer) Next() Token {
+func (p *PeekingLexer) Next() *Token {
 	for int(p.rawCursor) < len(p.tokens) {
-		t := p.tokens[p.rawCursor]
+		t := &p.tokens[p.rawCursor]
 		p.rawCursor++
 		if p.elide[t.Type] {
 			continue
 		}
 		p.cursor++
 		return t
 	}
-	return p.eof
+	return &p.eof
 }
 
 // Peek ahead at the next token.
-func (p *PeekingLexer) Peek() Token {
+func (p *PeekingLexer) Peek() *Token {
 	for i := int(p.rawCursor); i < len(p.tokens); i++ {
-		t := p.tokens[i]
+		t := &p.tokens[i]
 		if p.elide[t.Type] {
 			continue
 		}
 		return t
 	}
-	return p.eof
+	return &p.eof
 }
 
 // PeekAny peeks forward over elided and non-elided tokens.
@@ -110,11 +110,11 @@ func (p *PeekingLexer) FastForward(rawCursor RawCursor) {
 // RawPeek peeks ahead at the next raw token.
 //
 // Unlike Peek, this will include elided tokens.
-func (p *PeekingLexer) RawPeek() Token {
+func (p *PeekingLexer) RawPeek() *Token {
 	if int(p.rawCursor) < len(p.tokens) {
-		return p.tokens[p.rawCursor]
+		return &p.tokens[p.rawCursor]
 	}
-	return p.eof
+	return &p.eof
 }
 
 // Clone creates a clone of this PeekingLexer at its current token.
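For downstream users of the lexer package, here is a minimal caller-side sketch of the new API. It is not part of this commit, and it assumes the v2 module path and the built-in TextScannerLexer definition:

package main

import (
	"fmt"
	"strings"

	"github.com/alecthomas/participle/v2/lexer"
)

func main() {
	// Tokenise a string with the built-in text/scanner-based definition.
	raw, err := lexer.TextScannerLexer.Lex("example", strings.NewReader("hello world"))
	if err != nil {
		panic(err)
	}
	plex, err := lexer.Upgrade(raw)
	if err != nil {
		panic(err)
	}
	// Next now returns *lexer.Token. The pointer aliases the PeekingLexer's
	// internal buffer, so dereference it to take a stable copy before keeping it.
	for tok := plex.Next(); !tok.EOF(); tok = plex.Next() {
		kept := *tok
		fmt.Println(kept.Value)
	}
}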
8 changes: 4 additions & 4 deletions lexer/peek_test.go
@@ -27,8 +27,8 @@ func TestUpgrade(t *testing.T) {
 	tokens := []lexer.Token{t0, ts, t1}
 	l, err := lexer.Upgrade(&staticLexer{tokens: tokens}, 3)
 	require.NoError(t, err)
-	require.Equal(t, t0, l.Peek())
-	require.Equal(t, t0, l.Peek())
+	require.Equal(t, t0, *l.Peek())
+	require.Equal(t, t0, *l.Peek())
 	require.Equal(t, tokens, l.Range(0, 3))
 }

@@ -49,6 +49,6 @@ func TestPeekAndNextAny(t *testing.T) {
 		{Type: -2, Value: "last", Pos: lexer.Position{Line: 1, Column: 13, Offset: 12}},
 	}
 	tok := plex.Next()
-	require.Equal(t, expected[0], tok)
-	require.Equal(t, expected[2], plex.Peek(), "should have skipped whitespace")
+	require.Equal(t, expected[0], *tok)
+	require.Equal(t, expected[2], *plex.Peek(), "should have skipped whitespace")
 }
14 changes: 7 additions & 7 deletions lexer/text_scanner_test.go
@@ -16,13 +16,13 @@ func TestLexer(t *testing.T) {
 	helloPos := lexer.Position{Offset: 0, Line: 1, Column: 1}
 	worldPos := lexer.Position{Offset: 6, Line: 1, Column: 7}
 	eofPos := lexer.Position{Offset: 11, Line: 1, Column: 12}
-	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "hello", Pos: helloPos}, lex.Peek())
-	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "hello", Pos: helloPos}, lex.Peek())
-	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "hello", Pos: helloPos}, lex.Next())
-	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "world", Pos: worldPos}, lex.Peek())
-	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "world", Pos: worldPos}, lex.Next())
-	require.Equal(t, lexer.Token{Type: scanner.EOF, Value: "", Pos: eofPos}, lex.Peek())
-	require.Equal(t, lexer.Token{Type: scanner.EOF, Value: "", Pos: eofPos}, lex.Next())
+	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "hello", Pos: helloPos}, *lex.Peek())
+	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "hello", Pos: helloPos}, *lex.Peek())
+	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "hello", Pos: helloPos}, *lex.Next())
+	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "world", Pos: worldPos}, *lex.Peek())
+	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "world", Pos: worldPos}, *lex.Next())
+	require.Equal(t, lexer.Token{Type: scanner.EOF, Value: "", Pos: eofPos}, *lex.Peek())
+	require.Equal(t, lexer.Token{Type: scanner.EOF, Value: "", Pos: eofPos}, *lex.Next())
 }
 
 func TestLexString(t *testing.T) {
6 changes: 3 additions & 3 deletions nodes.go
@@ -167,14 +167,14 @@ func (s *strct) Parse(ctx *parseContext, parent reflect.Value) (out []reflect.Va
 	return []reflect.Value{sv}, ctx.Apply()
 }
 
-func (s *strct) maybeInjectStartToken(token lexer.Token, v reflect.Value) {
+func (s *strct) maybeInjectStartToken(token *lexer.Token, v reflect.Value) {
 	if s.posFieldIndex == nil {
 		return
 	}
 	v.FieldByIndex(s.posFieldIndex).Set(reflect.ValueOf(token.Pos))
 }
 
-func (s *strct) maybeInjectEndToken(token lexer.Token, v reflect.Value) {
+func (s *strct) maybeInjectEndToken(token *lexer.Token, v reflect.Value) {
 	if s.endPosFieldIndex == nil {
 		return
 	}
@@ -382,7 +382,7 @@ func (s *sequence) Parse(ctx *parseContext, parent reflect.Value) (out []reflect
 			return nil, nil
 		}
 		token := ctx.Peek()
-		return out, &UnexpectedTokenError{Unexpected: token, at: n}
+		return out, &UnexpectedTokenError{Unexpected: *token, at: n}
 	}
 	// Special-case for when children return an empty match.
 	// Appending an empty, non-nil slice to a nil slice returns a nil slice.
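The nodes.go and parser.go call sites all follow the same pattern: UnexpectedTokenError keeps a Token value, so the peeked pointer is dereferenced, copying the token into the error rather than aliasing the lexer's buffer. A self-contained sketch of why that copy matters (wrappedError stands in for the internal UnexpectedTokenError):

package main

import (
	"fmt"

	"github.com/alecthomas/participle/v2/lexer"
)

// wrappedError mimics UnexpectedTokenError's shape: it stores a Token value.
type wrappedError struct {
	Unexpected lexer.Token
}

func main() {
	tokens := []lexer.Token{{Type: -2, Value: "hello"}}
	ptr := &tokens[0]                     // what Peek/Next now return: a pointer into the buffer
	err := wrappedError{Unexpected: *ptr} // the dereference copies the token
	tokens[0].Value = "mutated"           // later writes to the buffer...
	fmt.Println(err.Unexpected.Value)     // ...leave the stored copy intact: prints "hello"
}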
8 changes: 4 additions & 4 deletions parser.go
@@ -244,7 +244,7 @@ func (p *Parser[G]) parseOne(ctx *parseContext, parseNode node, rv reflect.Value
 	}
 	token := ctx.Peek()
 	if !token.EOF() && !ctx.allowTrailing {
-		return ctx.DeepestError(&UnexpectedTokenError{Unexpected: token})
+		return ctx.DeepestError(&UnexpectedTokenError{Unexpected: *token})
 	}
 	return nil
 }
@@ -262,23 +262,23 @@ func (p *Parser[G]) parseInto(ctx *parseContext, parseNode node, rv reflect.Valu
 	}
 	if pv == nil {
 		token := ctx.Peek()
-		return ctx.DeepestError(&UnexpectedTokenError{Unexpected: token})
+		return ctx.DeepestError(&UnexpectedTokenError{Unexpected: *token})
 	}
 	return nil
 }
 
 func (p *Parser[G]) rootParseable(ctx *parseContext, parseable Parseable) error {
 	if err := parseable.Parse(ctx.PeekingLexer); err != nil {
 		if err == NextMatch {
-			err = &UnexpectedTokenError{Unexpected: ctx.Peek()}
+			err = &UnexpectedTokenError{Unexpected: *ctx.Peek()}
 		} else {
 			err = &ParseError{Msg: err.Error(), Pos: ctx.Peek().Pos}
 		}
 		return ctx.DeepestError(err)
 	}
 	peek := ctx.Peek()
 	if !peek.EOF() && !ctx.allowTrailing {
-		return ctx.DeepestError(&UnexpectedTokenError{Unexpected: peek})
+		return ctx.DeepestError(&UnexpectedTokenError{Unexpected: *peek})
 	}
 	return nil
 }
10 changes: 6 additions & 4 deletions struct.go
@@ -63,7 +63,7 @@ func (s *structLexer) GetField(field int) structLexerField {
 	}
 }
 
-func (s *structLexer) Peek() (lexer.Token, error) {
+func (s *structLexer) Peek() (*lexer.Token, error) {
 	field := s.field
 	lex := s.lexer
 	for {
@@ -74,7 +74,8 @@ func (s *structLexer) Peek() (lexer.Token, error) {
 		}
 		field++
 		if field >= s.NumField() {
-			return lexer.EOFToken(token.Pos), nil
+			t := lexer.EOFToken(token.Pos)
+			return &t, nil
 		}
 		ft := s.GetField(field).StructField
 		tag := fieldLexerTag(ft)
@@ -86,14 +87,15 @@
 	}
 }
 
-func (s *structLexer) Next() (lexer.Token, error) {
+func (s *structLexer) Next() (*lexer.Token, error) {
 	token := s.lexer.Next()
 	if !token.EOF() {
 		token.Pos.Line = s.field + 1
 		return token, nil
 	}
 	if s.field+1 >= s.NumField() {
-		return lexer.EOFToken(token.Pos), nil
+		t := lexer.EOFToken(token.Pos)
+		return &t, nil
 	}
 	s.field++
 	ft := s.Field().StructField
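The two-line EOFToken dance above is forced by Go itself: a function's return value is not addressable, so &lexer.EOFToken(token.Pos) would not compile. Binding the result to a local and returning its address is safe; a standard-library-only illustration:

package main

import "fmt"

type token struct{ value string }

func eofToken() token { return token{value: "<eof>"} }

func newEOF() *token {
	// return &eofToken() // does not compile: cannot take the address of a call result
	t := eofToken()
	return &t // fine: escape analysis moves t to the heap, so the pointer stays valid
}

func main() {
	fmt.Println(newEOF().value)
}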
12 changes: 6 additions & 6 deletions struct_test.go
@@ -19,13 +19,13 @@ func TestStructLexerTokens(t *testing.T) {
 	require.NoError(t, err)
 	t12 := lexer.Token{Type: scanner.Int, Value: "12", Pos: lexer.Position{Filename: "testScanner", Line: 1, Column: 1}}
 	t34 := lexer.Token{Type: scanner.Int, Value: "34", Pos: lexer.Position{Filename: "B", Line: 2, Column: 1}}
-	require.Equal(t, t12, mustPeek(scan))
+	require.Equal(t, t12, *mustPeek(scan))
 	require.Equal(t, 0, scan.field)
-	require.Equal(t, t12, mustNext(scan))
+	require.Equal(t, t12, *mustNext(scan))
 
-	require.Equal(t, t34, mustPeek(scan))
+	require.Equal(t, t34, *mustPeek(scan))
 	require.Equal(t, 0, scan.field)
-	require.Equal(t, t34, mustNext(scan))
+	require.Equal(t, t34, *mustNext(scan))
 	require.Equal(t, 1, scan.field)
 
 	require.True(t, mustNext(scan).EOF())
@@ -75,15 +75,15 @@ func TestCollectFieldIndexes(t *testing.T) {
 	require.Equal(t, [][]int{{0, 0}, {0, 1}, {1}}, indexes)
 }
 
-func mustPeek(scan *structLexer) lexer.Token {
+func mustPeek(scan *structLexer) *lexer.Token {
 	token, err := scan.Peek()
 	if err != nil {
 		panic(err)
 	}
 	return token
 }
 
-func mustNext(scan *structLexer) lexer.Token { // nolint: interfacer
+func mustNext(scan *structLexer) *lexer.Token { // nolint: interfacer
 	token, err := scan.Next()
 	if err != nil {
 		panic(err)