diff --git a/_examples/precedenceclimbing/main.go b/_examples/precedenceclimbing/main.go
index c98abf15..5652742c 100644
--- a/_examples/precedenceclimbing/main.go
+++ b/_examples/precedenceclimbing/main.go
@@ -103,7 +103,7 @@ func isOp(rn rune) bool {
 	return strings.ContainsRune("+-*/^", rn)
 }
 
-func peek(lex *lexer.PeekingLexer) lexer.Token {
+func peek(lex *lexer.PeekingLexer) *lexer.Token {
 	return lex.Peek()
 }
 
diff --git a/go.mod b/go.mod
index 22a51ed3..aad3e898 100644
--- a/go.mod
+++ b/go.mod
@@ -7,7 +7,4 @@ require (
 	github.com/alecthomas/repr v0.1.0
 )
 
-require (
-	github.com/hexops/gotextdiff v1.0.3 // indirect
-	github.com/mitchellh/mapstructure v1.5.0
-)
+require github.com/hexops/gotextdiff v1.0.3 // indirect
diff --git a/go.sum b/go.sum
index e2a72a07..f5d4fd4f 100644
--- a/go.sum
+++ b/go.sum
@@ -1,10 +1,6 @@
-github.com/alecthomas/assert/v2 v2.0.3 h1:WKqJODfOiQG0nEJKFKzDIG3E29CN2/4zR9XGJzKIkbg=
-github.com/alecthomas/assert/v2 v2.0.3/go.mod h1:b/+1DI2Q6NckYi+3mXyH3wFb8qG37K/DuK80n7WefXA=
 github.com/alecthomas/assert/v2 v2.1.0 h1:tbredtNcQnoSd3QBhQWI7QZ3XHOVkw1Moklp2ojoH/0=
 github.com/alecthomas/assert/v2 v2.1.0/go.mod h1:b/+1DI2Q6NckYi+3mXyH3wFb8qG37K/DuK80n7WefXA=
 github.com/alecthomas/repr v0.1.0 h1:ENn2e1+J3k09gyj2shc0dHr/yjaWSHRlrJ4DPMevDqE=
 github.com/alecthomas/repr v0.1.0/go.mod h1:2kn6fqh/zIyPLmm3ugklbEi5hg5wS435eygvNfaDQL8=
 github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
 github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
-github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
-github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
diff --git a/lexer/internal/conformance/conformance_test.go b/lexer/internal/conformance/conformance_test.go
index 839bbc1d..63481656 100644
--- a/lexer/internal/conformance/conformance_test.go
+++ b/lexer/internal/conformance/conformance_test.go
@@ -244,8 +244,7 @@ func TestLexerConformance(t *testing.T) {
 	testLexer(t, conformanceLexer)
 }
 
-func genLexer(t *testing.T) {
-	t.Helper()
+func genLexer(t *testing.T) { //nolint: thelper
 	lexerJSON, err := json.Marshal(conformanceLexer)
 	assert.NoError(t, err)
 	cwd, err := os.Getwd()
@@ -260,7 +259,7 @@ func genLexer(t *testing.T) {
 		"--tags", "generated",
 		"--name", "GeneratedConformance",
 		"--output", generatedConformanceLexer)
-	cmd.Stdout = os.Stdout
+	cmd.Stdout = os.Stderr
 	cmd.Stderr = os.Stderr
 	w, err := cmd.StdinPipe()
 	assert.NoError(t, err)
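
The example change above is the caller-facing side of the lexer change below: `PeekingLexer.Peek` now returns `*lexer.Token`, so the example's `peek` helper forwards the pointer unchanged. As a minimal sketch, a hypothetical call site in the example's package could read fields through the pointer without copying the token (`nextIsOp` is illustrative only; `peek` and `isOp` are the example's helpers shown above):

```go
// Illustrative only: assumes it lives alongside the example's peek and isOp.
func nextIsOp(lex *lexer.PeekingLexer) bool {
	t := peek(lex) // *lexer.Token after this change; no Token copy
	return len(t.Value) == 1 && isOp(rune(t.Value[0]))
}
```
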
diff --git a/lexer/peek.go b/lexer/peek.go
index 4e7324da..8b14ba95 100644
--- a/lexer/peek.go
+++ b/lexer/peek.go
@@ -52,9 +52,9 @@ func (p *PeekingLexer) RawCursor() RawCursor {
 }
 
 // Next consumes and returns the next token.
-func (p *PeekingLexer) Next() Token {
+func (p *PeekingLexer) Next() *Token {
 	for int(p.rawCursor) < len(p.tokens) {
-		t := p.tokens[p.rawCursor]
+		t := &p.tokens[p.rawCursor]
 		p.rawCursor++
 		if p.elide[t.Type] {
 			continue
@@ -62,19 +62,19 @@ func (p *PeekingLexer) Next() Token {
 		p.cursor++
 		return t
 	}
-	return p.eof
+	return &p.eof
 }
 
 // Peek ahead at the next token.
-func (p *PeekingLexer) Peek() Token {
+func (p *PeekingLexer) Peek() *Token {
 	for i := int(p.rawCursor); i < len(p.tokens); i++ {
-		t := p.tokens[i]
+		t := &p.tokens[i]
 		if p.elide[t.Type] {
 			continue
 		}
 		return t
 	}
-	return p.eof
+	return &p.eof
 }
 
 // PeekAny peeks forward over elided and non-elided tokens.
@@ -110,11 +110,11 @@ func (p *PeekingLexer) FastForward(rawCursor RawCursor) {
 // RawPeek peeks ahead at the next raw token.
 //
 // Unlike Peek, this will include elided tokens.
-func (p *PeekingLexer) RawPeek() Token {
+func (p *PeekingLexer) RawPeek() *Token {
 	if int(p.rawCursor) < len(p.tokens) {
-		return p.tokens[p.rawCursor]
+		return &p.tokens[p.rawCursor]
 	}
-	return p.eof
+	return &p.eof
 }
 
 // Clone creates a clone of this PeekingLexer at its current token.
diff --git a/lexer/peek_test.go b/lexer/peek_test.go
index e03e19dc..0cdbec9f 100644
--- a/lexer/peek_test.go
+++ b/lexer/peek_test.go
@@ -27,8 +27,8 @@ func TestUpgrade(t *testing.T) {
 	tokens := []lexer.Token{t0, ts, t1}
 	l, err := lexer.Upgrade(&staticLexer{tokens: tokens}, 3)
 	require.NoError(t, err)
-	require.Equal(t, t0, l.Peek())
-	require.Equal(t, t0, l.Peek())
+	require.Equal(t, t0, *l.Peek())
+	require.Equal(t, t0, *l.Peek())
 	require.Equal(t, tokens, l.Range(0, 3))
 }
 
@@ -49,6 +49,6 @@ func TestPeekAndNextAny(t *testing.T) {
 		{Type: -2, Value: "last", Pos: lexer.Position{Line: 1, Column: 13, Offset: 12}},
 	}
 	tok := plex.Next()
-	require.Equal(t, expected[0], tok)
-	require.Equal(t, expected[2], plex.Peek(), "should have skipped whitespace")
+	require.Equal(t, expected[0], *tok)
+	require.Equal(t, expected[2], *plex.Peek(), "should have skipped whitespace")
 }
diff --git a/lexer/text_scanner_test.go b/lexer/text_scanner_test.go
index e3e03bcf..a0ea7b4f 100644
--- a/lexer/text_scanner_test.go
+++ b/lexer/text_scanner_test.go
@@ -16,13 +16,13 @@ func TestLexer(t *testing.T) {
 	helloPos := lexer.Position{Offset: 0, Line: 1, Column: 1}
 	worldPos := lexer.Position{Offset: 6, Line: 1, Column: 7}
 	eofPos := lexer.Position{Offset: 11, Line: 1, Column: 12}
-	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "hello", Pos: helloPos}, lex.Peek())
-	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "hello", Pos: helloPos}, lex.Peek())
-	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "hello", Pos: helloPos}, lex.Next())
-	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "world", Pos: worldPos}, lex.Peek())
-	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "world", Pos: worldPos}, lex.Next())
-	require.Equal(t, lexer.Token{Type: scanner.EOF, Value: "", Pos: eofPos}, lex.Peek())
-	require.Equal(t, lexer.Token{Type: scanner.EOF, Value: "", Pos: eofPos}, lex.Next())
+	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "hello", Pos: helloPos}, *lex.Peek())
+	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "hello", Pos: helloPos}, *lex.Peek())
+	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "hello", Pos: helloPos}, *lex.Next())
+	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "world", Pos: worldPos}, *lex.Peek())
+	require.Equal(t, lexer.Token{Type: scanner.Ident, Value: "world", Pos: worldPos}, *lex.Next())
+	require.Equal(t, lexer.Token{Type: scanner.EOF, Value: "", Pos: eofPos}, *lex.Peek())
+	require.Equal(t, lexer.Token{Type: scanner.EOF, Value: "", Pos: eofPos}, *lex.Next())
 }
 
 func TestLexString(t *testing.T) {
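
The `lexer` hunks above are the heart of this diff: `Next`, `Peek`, and `RawPeek` now return `*lexer.Token` pointing into the token slice (or at the cached EOF token) instead of copying a `Token` on every call. The test updates show the migration for callers: dereference with `*` wherever a token value is compared or stored. A sketch of the consuming pattern, assuming a caller that wants to keep a token past the next `Next` call (`takeToken` is hypothetical, not library code):

```go
package example

import "github.com/alecthomas/participle/v2/lexer"

// takeToken copies the current token and consumes it. Inspection through
// the pointer (tok.Type, tok.EOF()) is copy-free; dereferencing copies
// exactly once, at the point where the token must outlive the cursor.
func takeToken(lex *lexer.PeekingLexer) (lexer.Token, bool) {
	tok := lex.Peek() // *lexer.Token: no copy made here
	if tok.EOF() {
		return lexer.Token{}, false
	}
	copied := *tok // explicit value copy
	lex.Next()     // also returns *lexer.Token; ignored here
	return copied, true
}
```
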
diff --git a/nodes.go b/nodes.go
index 3d609ca7..1070e7bf 100644
--- a/nodes.go
+++ b/nodes.go
@@ -167,14 +167,14 @@ func (s *strct) Parse(ctx *parseContext, parent reflect.Value) (out []reflect.Va
 	return []reflect.Value{sv}, ctx.Apply()
 }
 
-func (s *strct) maybeInjectStartToken(token lexer.Token, v reflect.Value) {
+func (s *strct) maybeInjectStartToken(token *lexer.Token, v reflect.Value) {
 	if s.posFieldIndex == nil {
 		return
 	}
 	v.FieldByIndex(s.posFieldIndex).Set(reflect.ValueOf(token.Pos))
 }
 
-func (s *strct) maybeInjectEndToken(token lexer.Token, v reflect.Value) {
+func (s *strct) maybeInjectEndToken(token *lexer.Token, v reflect.Value) {
 	if s.endPosFieldIndex == nil {
 		return
 	}
@@ -382,7 +382,7 @@ func (s *sequence) Parse(ctx *parseContext, parent reflect.Value) (out []reflect
 			return nil, nil
 		}
 		token := ctx.Peek()
-		return out, &UnexpectedTokenError{Unexpected: token, at: n}
+		return out, &UnexpectedTokenError{Unexpected: *token, at: n}
 	}
 	// Special-case for when children return an empty match.
 	// Appending an empty, non-nil slice to a nil slice returns a nil slice.
diff --git a/parser.go b/parser.go
index 1da5004d..f61439f7 100644
--- a/parser.go
+++ b/parser.go
@@ -244,7 +244,7 @@ func (p *Parser[G]) parseOne(ctx *parseContext, parseNode node, rv reflect.Value
 	}
 	token := ctx.Peek()
 	if !token.EOF() && !ctx.allowTrailing {
-		return ctx.DeepestError(&UnexpectedTokenError{Unexpected: token})
+		return ctx.DeepestError(&UnexpectedTokenError{Unexpected: *token})
 	}
 	return nil
 }
@@ -262,7 +262,7 @@ func (p *Parser[G]) parseInto(ctx *parseContext, parseNode node, rv reflect.Valu
 	}
 	if pv == nil {
 		token := ctx.Peek()
-		return ctx.DeepestError(&UnexpectedTokenError{Unexpected: token})
+		return ctx.DeepestError(&UnexpectedTokenError{Unexpected: *token})
 	}
 	return nil
 }
@@ -270,7 +270,7 @@ func (p *Parser[G]) rootParseable(ctx *parseContext, parseable Parseable) error
 	if err := parseable.Parse(ctx.PeekingLexer); err != nil {
 		if err == NextMatch {
-			err = &UnexpectedTokenError{Unexpected: ctx.Peek()}
+			err = &UnexpectedTokenError{Unexpected: *ctx.Peek()}
 		} else {
 			err = &ParseError{Msg: err.Error(), Pos: ctx.Peek().Pos}
 		}
 		return ctx.DeepestError(err)
@@ -278,7 +278,7 @@ func (p *Parser[G]) rootParseable(ctx *parseContext, parseable Parseable) error
 	}
 	peek := ctx.Peek()
 	if !peek.EOF() && !ctx.allowTrailing {
-		return ctx.DeepestError(&UnexpectedTokenError{Unexpected: peek})
+		return ctx.DeepestError(&UnexpectedTokenError{Unexpected: *peek})
 	}
 	return nil
 }
diff --git a/struct.go b/struct.go
index 882d6d2d..62052448 100644
--- a/struct.go
+++ b/struct.go
@@ -63,7 +63,7 @@ func (s *structLexer) GetField(field int) structLexerField {
 	}
 }
 
-func (s *structLexer) Peek() (lexer.Token, error) {
+func (s *structLexer) Peek() (*lexer.Token, error) {
 	field := s.field
 	lex := s.lexer
 	for {
@@ -74,7 +74,8 @@ func (s *structLexer) Peek() (lexer.Token, error) {
 		}
 		field++
 		if field >= s.NumField() {
-			return lexer.EOFToken(token.Pos), nil
+			t := lexer.EOFToken(token.Pos)
+			return &t, nil
 		}
 		ft := s.GetField(field).StructField
 		tag := fieldLexerTag(ft)
@@ -86,14 +87,15 @@ func (s *structLexer) Peek() (lexer.Token, error) {
 		}
 	}
 }
-func (s *structLexer) Next() (lexer.Token, error) {
+func (s *structLexer) Next() (*lexer.Token, error) {
 	token := s.lexer.Next()
 	if !token.EOF() {
 		token.Pos.Line = s.field + 1
 		return token, nil
 	}
 	if s.field+1 >= s.NumField() {
-		return lexer.EOFToken(token.Pos), nil
+		t := lexer.EOFToken(token.Pos)
+		return &t, nil
 	}
 	s.field++
 	ft := s.Field().StructField
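
Two idioms recur in the hunks above. In struct.go, `lexer.EOFToken(...)` returns a value and a Go function result is not addressable, so the token is bound to a local before its address is returned. In nodes.go and parser.go, `UnexpectedTokenError.Unexpected` remains a value field, so `*token` copies the token exactly once, on the error path only, while the hot path stays copy-free. A condensed sketch of both under the new API (`demo` is hypothetical, and a plain `fmt.Errorf` stands in for the package's error types):

```go
package example

import (
	"fmt"

	"github.com/alecthomas/participle/v2/lexer"
)

// demo returns a pointer to a freshly built EOF token, copying the
// peeked token's data into an error only on the failure path.
func demo(lex *lexer.PeekingLexer, pos lexer.Position) (*lexer.Token, error) {
	if tok := lex.Peek(); !tok.EOF() {
		return nil, fmt.Errorf("unexpected token %q", tok.Value)
	}
	t := lexer.EOFToken(pos) // &lexer.EOFToken(pos) would not compile
	return &t, nil
}
```
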
diff --git a/struct_test.go b/struct_test.go
index b4d797a4..881c88f6 100644
--- a/struct_test.go
+++ b/struct_test.go
@@ -19,13 +19,13 @@ func TestStructLexerTokens(t *testing.T) {
 	require.NoError(t, err)
 	t12 := lexer.Token{Type: scanner.Int, Value: "12", Pos: lexer.Position{Filename: "testScanner", Line: 1, Column: 1}}
 	t34 := lexer.Token{Type: scanner.Int, Value: "34", Pos: lexer.Position{Filename: "B", Line: 2, Column: 1}}
-	require.Equal(t, t12, mustPeek(scan))
+	require.Equal(t, t12, *mustPeek(scan))
 	require.Equal(t, 0, scan.field)
-	require.Equal(t, t12, mustNext(scan))
+	require.Equal(t, t12, *mustNext(scan))
 
-	require.Equal(t, t34, mustPeek(scan))
+	require.Equal(t, t34, *mustPeek(scan))
 	require.Equal(t, 0, scan.field)
-	require.Equal(t, t34, mustNext(scan))
+	require.Equal(t, t34, *mustNext(scan))
 	require.Equal(t, 1, scan.field)
 
 	require.True(t, mustNext(scan).EOF())
@@ -75,7 +75,7 @@ func TestCollectFieldIndexes(t *testing.T) {
 	require.Equal(t, [][]int{{0, 0}, {0, 1}, {1}}, indexes)
 }
 
-func mustPeek(scan *structLexer) lexer.Token {
+func mustPeek(scan *structLexer) *lexer.Token {
 	token, err := scan.Peek()
 	if err != nil {
 		panic(err)
@@ -83,7 +83,7 @@ func mustPeek(scan *structLexer) lexer.Token {
 	return token
 }
 
-func mustNext(scan *structLexer) lexer.Token { // nolint: interfacer
+func mustNext(scan *structLexer) *lexer.Token { // nolint: interfacer
 	token, err := scan.Next()
 	if err != nil {
 		panic(err)
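
One property underpins the whole change: `&p.tokens[i]` hands out pointers into the token slice's backing array, which stay valid because the methods shown above only index and advance cursors, never append or reallocate. Callers that need a stable snapshot still copy with `*tok`, as the updated tests and the `UnexpectedTokenError` call sites do. A standalone illustration of the aliasing involved (plain Go, no participle types):

```go
package main

import "fmt"

func main() {
	tokens := []string{"ident", "int"}
	p := &tokens[0]   // pointer into the slice's backing array
	tokens[0] = "eof" // no append, so no reallocation: p still aliases element 0
	fmt.Println(*p)   // prints "eof"
}
```
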