From 030812f079b6d1a4b08ff6a26eb6455a27221910 Mon Sep 17 00:00:00 2001
From: Alan Donovan
Date: Mon, 6 Jun 2022 12:42:45 -0400
Subject: [PATCH] internal: remove unneeded FileSets

This change replaces various uses of FileSet with either nothing
(when the parameter wasn't really needed) or token.File.

Notably, astutil.Imports was being used to extract the imports of a
file (available at ast.File.Imports), forcing a number of wrappers to
have a FileSet parameter.

Also, simplify various expressions of the form file.Position().Line
to file.Line().

Change-Id: I19fe86a18aba50352275f77ed737513744d3930c
Reviewed-on: https://go-review.googlesource.com/c/tools/+/410366
Run-TryBot: Alan Donovan
TryBot-Result: Gopher Robot
Reviewed-by: Robert Findley
gopls-CI: kokoro
---
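Note (illustrative sketch, not part of the diff below): the recurring rewrite
is to drop the *token.FileSet parameter and work with the relevant *token.File
and the AST directly. A minimal sketch of the two patterns, using made-up
helper names (lineOf, importedAs) chosen only for this example; importedAs
mirrors the new f.Imports loop in analysisinternal.TypeExpr in the diff below:

    package p

    import (
        "go/ast"
        "go/token"
        "strconv"
    )

    // lineOf shows the position simplification: given the *token.File that
    // contains pos, tokFile.Line(pos) replaces fset.Position(pos).Line, so
    // no FileSet needs to be threaded through.
    func lineOf(tokFile *token.File, pos token.Pos) int {
        return tokFile.Line(pos)
    }

    // importedAs reads imports straight off the AST (f.Imports) instead of
    // calling astutil.Imports(fset, f); it reports the local name, if any,
    // under which pkgPath is imported in f.
    func importedAs(f *ast.File, pkgPath string) string {
        for _, imp := range f.Imports {
            if path, _ := strconv.Unquote(imp.Path.Value); path == pkgPath {
                if imp.Name != nil {
                    return imp.Name.Name
                }
            }
        }
        return ""
    }
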
 internal/analysisinternal/analysis.go         | 36 ++++++++---------
 internal/imports/imports.go                   | 17 +++++---
 internal/imports/sortimports.go               | 39 +++++++++++--------
 .../lsp/analysis/fillreturns/fillreturns.go   |  3 +-
 .../lsp/analysis/fillstruct/fillstruct.go     | 22 +++++------
 .../lsp/analysis/undeclaredname/undeclared.go |  2 +-
 internal/lsp/source/extract.go                | 13 +++----
 internal/lsp/source/stub.go                   |  5 ++-
 internal/lsp/source/util.go                   |  6 +--
 9 files changed, 73 insertions(+), 70 deletions(-)

diff --git a/internal/analysisinternal/analysis.go b/internal/analysisinternal/analysis.go
index 78ee2c06bea..3f1e573342f 100644
--- a/internal/analysisinternal/analysis.go
+++ b/internal/analysisinternal/analysis.go
@@ -11,9 +11,8 @@ import (
     "go/ast"
     "go/token"
     "go/types"
-    "strings"
+    "strconv"
 
-    "golang.org/x/tools/go/ast/astutil"
     "golang.org/x/tools/internal/lsp/fuzzy"
 )
 
@@ -37,7 +36,7 @@ func TypeErrorEndPos(fset *token.FileSet, src []byte, start token.Pos) token.Pos
     return end
 }
 
-func ZeroValue(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
+func ZeroValue(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
     under := typ
     if n, ok := typ.(*types.Named); ok {
         under = n.Underlying()
@@ -57,7 +56,7 @@ func ZeroValue(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.T
     case *types.Chan, *types.Interface, *types.Map, *types.Pointer, *types.Signature, *types.Slice, *types.Array:
         return ast.NewIdent("nil")
     case *types.Struct:
-        texpr := TypeExpr(fset, f, pkg, typ) // typ because we want the name here.
+        texpr := TypeExpr(f, pkg, typ) // typ because we want the name here.
         if texpr == nil {
             return nil
         }
@@ -81,7 +80,7 @@ func IsZeroValue(expr ast.Expr) bool {
     }
 }
 
-func TypeExpr(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
+func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
     switch t := typ.(type) {
     case *types.Basic:
         switch t.Kind() {
@@ -91,7 +90,7 @@ func TypeExpr(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.Ty
             return ast.NewIdent(t.Name())
         }
     case *types.Pointer:
-        x := TypeExpr(fset, f, pkg, t.Elem())
+        x := TypeExpr(f, pkg, t.Elem())
         if x == nil {
             return nil
         }
@@ -100,7 +99,7 @@ func TypeExpr(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.Ty
             X:  x,
         }
     case *types.Array:
-        elt := TypeExpr(fset, f, pkg, t.Elem())
+        elt := TypeExpr(f, pkg, t.Elem())
         if elt == nil {
             return nil
         }
@@ -112,7 +111,7 @@ func TypeExpr(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.Ty
             Elt: elt,
         }
     case *types.Slice:
-        elt := TypeExpr(fset, f, pkg, t.Elem())
+        elt := TypeExpr(f, pkg, t.Elem())
         if elt == nil {
             return nil
         }
@@ -120,8 +119,8 @@ func TypeExpr(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.Ty
             Elt: elt,
         }
     case *types.Map:
-        key := TypeExpr(fset, f, pkg, t.Key())
-        value := TypeExpr(fset, f, pkg, t.Elem())
+        key := TypeExpr(f, pkg, t.Key())
+        value := TypeExpr(f, pkg, t.Elem())
         if key == nil || value == nil {
             return nil
         }
@@ -134,7 +133,7 @@ func TypeExpr(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.Ty
         if t.Dir() == types.SendRecv {
             dir = ast.SEND | ast.RECV
         }
-        value := TypeExpr(fset, f, pkg, t.Elem())
+        value := TypeExpr(f, pkg, t.Elem())
         if value == nil {
             return nil
         }
@@ -145,7 +144,7 @@ func TypeExpr(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.Ty
     case *types.Signature:
         var params []*ast.Field
         for i := 0; i < t.Params().Len(); i++ {
-            p := TypeExpr(fset, f, pkg, t.Params().At(i).Type())
+            p := TypeExpr(f, pkg, t.Params().At(i).Type())
             if p == nil {
                 return nil
             }
@@ -160,7 +159,7 @@ func TypeExpr(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.Ty
         }
         var returns []*ast.Field
         for i := 0; i < t.Results().Len(); i++ {
-            r := TypeExpr(fset, f, pkg, t.Results().At(i).Type())
+            r := TypeExpr(f, pkg, t.Results().At(i).Type())
             if r == nil {
                 return nil
             }
@@ -184,13 +183,12 @@ func TypeExpr(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.Ty
             return ast.NewIdent(t.Obj().Name())
         }
         pkgName := t.Obj().Pkg().Name()
+
         // If the file already imports the package under another name, use that.
-        for _, group := range astutil.Imports(fset, f) {
-            for _, cand := range group {
-                if strings.Trim(cand.Path.Value, `"`) == t.Obj().Pkg().Path() {
-                    if cand.Name != nil && cand.Name.Name != "" {
-                        pkgName = cand.Name.Name
-                    }
+        for _, cand := range f.Imports {
+            if path, _ := strconv.Unquote(cand.Path.Value); path == t.Obj().Pkg().Path() {
+                if cand.Name != nil && cand.Name.Name != "" {
+                    pkgName = cand.Name.Name
                 }
             }
         }
diff --git a/internal/imports/imports.go b/internal/imports/imports.go
index 79026726a24..95a88383a79 100644
--- a/internal/imports/imports.go
+++ b/internal/imports/imports.go
@@ -103,12 +103,17 @@ func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, e
     return formatFile(fileSet, file, src, nil, opt)
 }
 
-func formatFile(fileSet *token.FileSet, file *ast.File, src []byte, adjust func(orig []byte, src []byte) []byte, opt *Options) ([]byte, error) {
-    mergeImports(fileSet, file)
-    sortImports(opt.LocalPrefix, fileSet, file)
-    imps := astutil.Imports(fileSet, file)
+// formatFile formats the file syntax tree.
+// It may mutate the token.FileSet.
+//
+// If an adjust function is provided, it is called after formatting
+// with the original source (formatFile's src parameter) and the
+// formatted file, and returns the postpocessed result.
+func formatFile(fset *token.FileSet, file *ast.File, src []byte, adjust func(orig []byte, src []byte) []byte, opt *Options) ([]byte, error) {
+    mergeImports(file)
+    sortImports(opt.LocalPrefix, fset.File(file.Pos()), file)
     var spacesBefore []string // import paths we need spaces before
-    for _, impSection := range imps {
+    for _, impSection := range astutil.Imports(fset, file) {
         // Within each block of contiguous imports, see if any
         // import lines are in different group numbers. If so,
         // we'll need to put a space between them so it's
@@ -132,7 +137,7 @@ func formatFile(fileSet *token.FileSet, file *ast.File, src []byte, adjust func(
     printConfig := &printer.Config{Mode: printerMode, Tabwidth: opt.TabWidth}
 
     var buf bytes.Buffer
-    err := printConfig.Fprint(&buf, fileSet, file)
+    err := printConfig.Fprint(&buf, fset, file)
     if err != nil {
         return nil, err
     }
diff --git a/internal/imports/sortimports.go b/internal/imports/sortimports.go
index c5a70c0bb36..85144db1dfa 100644
--- a/internal/imports/sortimports.go
+++ b/internal/imports/sortimports.go
@@ -3,6 +3,7 @@
 // license that can be found in the LICENSE file.
 
 // Hacked up copy of go/ast/import.go
+// Modified to use a single token.File in preference to a FileSet.
 
 package imports
 
@@ -16,7 +17,9 @@ import (
 
 // sortImports sorts runs of consecutive import lines in import blocks in f.
 // It also removes duplicate imports when it is possible to do so without data loss.
-func sortImports(localPrefix string, fset *token.FileSet, f *ast.File) {
+//
+// It may mutate the token.File.
+func sortImports(localPrefix string, tokFile *token.File, f *ast.File) {
     for i, d := range f.Decls {
         d, ok := d.(*ast.GenDecl)
         if !ok || d.Tok != token.IMPORT {
@@ -39,21 +42,21 @@ func sortImports(localPrefix string, fset *token.FileSet, f *ast.File) {
         i := 0
         specs := d.Specs[:0]
         for j, s := range d.Specs {
-            if j > i && fset.Position(s.Pos()).Line > 1+fset.Position(d.Specs[j-1].End()).Line {
+            if j > i && tokFile.Line(s.Pos()) > 1+tokFile.Line(d.Specs[j-1].End()) {
                 // j begins a new run. End this one.
-                specs = append(specs, sortSpecs(localPrefix, fset, f, d.Specs[i:j])...)
+                specs = append(specs, sortSpecs(localPrefix, tokFile, f, d.Specs[i:j])...)
                 i = j
             }
         }
-        specs = append(specs, sortSpecs(localPrefix, fset, f, d.Specs[i:])...)
+        specs = append(specs, sortSpecs(localPrefix, tokFile, f, d.Specs[i:])...)
         d.Specs = specs
 
         // Deduping can leave a blank line before the rparen; clean that up.
         if len(d.Specs) > 0 {
             lastSpec := d.Specs[len(d.Specs)-1]
-            lastLine := fset.PositionFor(lastSpec.Pos(), false).Line
-            if rParenLine := fset.PositionFor(d.Rparen, false).Line; rParenLine > lastLine+1 {
-                fset.File(d.Rparen).MergeLine(rParenLine - 1)
+            lastLine := tokFile.PositionFor(lastSpec.Pos(), false).Line
+            if rParenLine := tokFile.PositionFor(d.Rparen, false).Line; rParenLine > lastLine+1 {
+                tokFile.MergeLine(rParenLine - 1) // has side effects!
             }
         }
     }
@@ -62,7 +65,7 @@ func sortImports(localPrefix string, fset *token.FileSet, f *ast.File) {
 // mergeImports merges all the import declarations into the first one.
 // Taken from golang.org/x/tools/ast/astutil.
 // This does not adjust line numbers properly
-func mergeImports(fset *token.FileSet, f *ast.File) {
+func mergeImports(f *ast.File) {
     if len(f.Decls) <= 1 {
         return
     }
@@ -144,7 +147,9 @@ type posSpan struct {
     End   token.Pos
 }
 
-func sortSpecs(localPrefix string, fset *token.FileSet, f *ast.File, specs []ast.Spec) []ast.Spec {
+// sortSpecs sorts the import specs within each import decl.
+// It may mutate the token.File.
+func sortSpecs(localPrefix string, tokFile *token.File, f *ast.File, specs []ast.Spec) []ast.Spec {
     // Can't short-circuit here even if specs are already sorted,
     // since they might yet need deduplication.
     // A lone import, however, may be safely ignored.
@@ -160,7 +165,7 @@ func sortSpecs(localPrefix string, fset *token.FileSet, f *ast.File, specs []ast
 
     // Identify comments in this range.
     // Any comment from pos[0].Start to the final line counts.
-    lastLine := fset.Position(pos[len(pos)-1].End).Line
+    lastLine := tokFile.Line(pos[len(pos)-1].End)
     cstart := len(f.Comments)
     cend := len(f.Comments)
     for i, g := range f.Comments {
@@ -170,7 +175,7 @@ func sortSpecs(localPrefix string, fset *token.FileSet, f *ast.File, specs []ast
         if i < cstart {
             cstart = i
         }
-        if fset.Position(g.End()).Line > lastLine {
+        if tokFile.Line(g.End()) > lastLine {
             cend = i
             break
         }
@@ -203,7 +208,7 @@ func sortSpecs(localPrefix string, fset *token.FileSet, f *ast.File, specs []ast
             deduped = append(deduped, s)
         } else {
             p := s.Pos()
-            fset.File(p).MergeLine(fset.Position(p).Line)
+            tokFile.MergeLine(tokFile.Line(p)) // has side effects!
         }
     }
     specs = deduped
@@ -234,21 +239,21 @@ func sortSpecs(localPrefix string, fset *token.FileSet, f *ast.File, specs []ast
 
     // Fixup comments can insert blank lines, because import specs are on different lines.
     // We remove those blank lines here by merging import spec to the first import spec line.
-    firstSpecLine := fset.Position(specs[0].Pos()).Line
+    firstSpecLine := tokFile.Line(specs[0].Pos())
     for _, s := range specs[1:] {
         p := s.Pos()
-        line := fset.File(p).Line(p)
+        line := tokFile.Line(p)
         for previousLine := line - 1; previousLine >= firstSpecLine; {
             // MergeLine can panic. Avoid the panic at the cost of not removing the blank line
             // golang/go#50329
-            if previousLine > 0 && previousLine < fset.File(p).LineCount() {
-                fset.File(p).MergeLine(previousLine)
+            if previousLine > 0 && previousLine < tokFile.LineCount() {
+                tokFile.MergeLine(previousLine) // has side effects!
                 previousLine--
             } else {
                 // try to gather some data to diagnose how this could happen
                 req := "Please report what the imports section of your go file looked like."
                 log.Printf("panic avoided: first:%d line:%d previous:%d max:%d. %s",
-                    firstSpecLine, line, previousLine, fset.File(p).LineCount(), req)
+                    firstSpecLine, line, previousLine, tokFile.LineCount(), req)
             }
         }
     }
diff --git a/internal/lsp/analysis/fillreturns/fillreturns.go b/internal/lsp/analysis/fillreturns/fillreturns.go
index 3299f74e989..72fe65d79ca 100644
--- a/internal/lsp/analysis/fillreturns/fillreturns.go
+++ b/internal/lsp/analysis/fillreturns/fillreturns.go
@@ -193,8 +193,7 @@ outer:
             // generate a zero value.
             value := analysisinternal.FindBestMatch(retTyp.String(), idents)
             if value == nil {
-                value = analysisinternal.ZeroValue(
-                    pass.Fset, file, pass.Pkg, retTyp)
+                value = analysisinternal.ZeroValue(file, pass.Pkg, retTyp)
             }
             if value == nil {
                 return nil, nil
diff --git a/internal/lsp/analysis/fillstruct/fillstruct.go b/internal/lsp/analysis/fillstruct/fillstruct.go
index 6987f786d2b..f160d4422ae 100644
--- a/internal/lsp/analysis/fillstruct/fillstruct.go
+++ b/internal/lsp/analysis/fillstruct/fillstruct.go
@@ -256,7 +256,7 @@ func SuggestedFix(fset *token.FileSet, rng span.Range, content []byte, file *ast
         // NOTE: We currently match on the name of the field key rather than the field type.
         value := analysisinternal.FindBestMatch(obj.Field(i).Name(), idents)
         if value == nil {
-            value = populateValue(fset, file, pkg, fieldTyp)
+            value = populateValue(file, pkg, fieldTyp)
         }
         if value == nil {
             return nil, nil
@@ -354,7 +354,7 @@ func indent(str, ind []byte) []byte {
 //
 // The reasoning here is that users will call fillstruct with the intention of
 // initializing the struct, in which case setting these fields to nil has no effect.
-func populateValue(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
+func populateValue(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
     under := typ
     if n, ok := typ.(*types.Named); ok {
         under = n.Underlying()
@@ -374,8 +374,8 @@ func populateValue(fset *token.FileSet, f *ast.File, pkg *types.Package, typ typ
             panic("unknown basic type")
         }
     case *types.Map:
-        k := analysisinternal.TypeExpr(fset, f, pkg, u.Key())
-        v := analysisinternal.TypeExpr(fset, f, pkg, u.Elem())
+        k := analysisinternal.TypeExpr(f, pkg, u.Key())
+        v := analysisinternal.TypeExpr(f, pkg, u.Elem())
         if k == nil || v == nil {
             return nil
         }
@@ -386,7 +386,7 @@ func populateValue(fset *token.FileSet, f *ast.File, pkg *types.Package, typ typ
             },
         }
     case *types.Slice:
-        s := analysisinternal.TypeExpr(fset, f, pkg, u.Elem())
+        s := analysisinternal.TypeExpr(f, pkg, u.Elem())
         if s == nil {
             return nil
         }
@@ -396,7 +396,7 @@ func populateValue(fset *token.FileSet, f *ast.File, pkg *types.Package, typ typ
             },
         }
     case *types.Array:
-        a := analysisinternal.TypeExpr(fset, f, pkg, u.Elem())
+        a := analysisinternal.TypeExpr(f, pkg, u.Elem())
         if a == nil {
             return nil
         }
@@ -409,7 +409,7 @@ func populateValue(fset *token.FileSet, f *ast.File, pkg *types.Package, typ typ
             },
         }
     case *types.Chan:
-        v := analysisinternal.TypeExpr(fset, f, pkg, u.Elem())
+        v := analysisinternal.TypeExpr(f, pkg, u.Elem())
         if v == nil {
             return nil
         }
@@ -427,7 +427,7 @@ func populateValue(fset *token.FileSet, f *ast.File, pkg *types.Package, typ typ
             },
         }
     case *types.Struct:
-        s := analysisinternal.TypeExpr(fset, f, pkg, typ)
+        s := analysisinternal.TypeExpr(f, pkg, typ)
         if s == nil {
             return nil
         }
@@ -437,7 +437,7 @@ func populateValue(fset *token.FileSet, f *ast.File, pkg *types.Package, typ typ
     case *types.Signature:
         var params []*ast.Field
         for i := 0; i < u.Params().Len(); i++ {
-            p := analysisinternal.TypeExpr(fset, f, pkg, u.Params().At(i).Type())
+            p := analysisinternal.TypeExpr(f, pkg, u.Params().At(i).Type())
             if p == nil {
                 return nil
             }
@@ -452,7 +452,7 @@ func populateValue(fset *token.FileSet, f *ast.File, pkg *types.Package, typ typ
         }
         var returns []*ast.Field
         for i := 0; i < u.Results().Len(); i++ {
-            r := analysisinternal.TypeExpr(fset, f, pkg, u.Results().At(i).Type())
+            r := analysisinternal.TypeExpr(f, pkg, u.Results().At(i).Type())
             if r == nil {
                 return nil
             }
@@ -487,7 +487,7 @@ func populateValue(fset *token.FileSet, f *ast.File, pkg *types.Package, typ typ
         default:
             return &ast.UnaryExpr{
                 Op: token.AND,
-                X:  populateValue(fset, f, pkg, u.Elem()),
+                X:  populateValue(f, pkg, u.Elem()),
             }
         }
     case *types.Interface:
diff --git a/internal/lsp/analysis/undeclaredname/undeclared.go b/internal/lsp/analysis/undeclaredname/undeclared.go
index 22b552c374d..faa14091aee 100644
--- a/internal/lsp/analysis/undeclaredname/undeclared.go
+++ b/internal/lsp/analysis/undeclaredname/undeclared.go
@@ -268,7 +268,7 @@ func newFunctionDeclaration(path []ast.Node, file *ast.File, pkg *types.Package,
             Names: []*ast.Ident{
                 ast.NewIdent(name),
             },
-            Type: analysisinternal.TypeExpr(fset, file, pkg, paramTypes[i]),
+            Type: analysisinternal.TypeExpr(file, pkg, paramTypes[i]),
         })
     }
 
diff --git a/internal/lsp/source/extract.go b/internal/lsp/source/extract.go
index 1272ff37fe5..90999d821a6 100644
--- a/internal/lsp/source/extract.go
+++ b/internal/lsp/source/extract.go
@@ -330,7 +330,7 @@ func extractFunctionMethod(fset *token.FileSet, rng span.Range, src []byte, file
             // The blank identifier is always a local variable
             continue
         }
-        typ := analysisinternal.TypeExpr(fset, file, pkg, v.obj.Type())
+        typ := analysisinternal.TypeExpr(file, pkg, v.obj.Type())
         if typ == nil {
             return nil, fmt.Errorf("nil AST expression for type: %v", v.obj.Name())
         }
@@ -1130,7 +1130,7 @@ func generateReturnInfo(enclosing *ast.FuncType, pkg *types.Package, path []ast.
             return nil, nil, fmt.Errorf(
                 "failed type conversion, AST expression: %T", field.Type)
         }
-        expr := analysisinternal.TypeExpr(fset, file, pkg, typ)
+        expr := analysisinternal.TypeExpr(file, pkg, typ)
         if expr == nil {
             return nil, nil, fmt.Errorf("nil AST expression")
         }
@@ -1138,10 +1138,9 @@ func generateReturnInfo(enclosing *ast.FuncType, pkg *types.Package, path []ast.
         name, idx = generateAvailableIdentifier(pos, file, path, info, "returnValue", idx)
         retVars = append(retVars, &returnVariable{
-            name: ast.NewIdent(name),
-            decl: &ast.Field{Type: expr},
-            zeroVal: analysisinternal.ZeroValue(
-                fset, file, pkg, typ),
+            name:    ast.NewIdent(name),
+            decl:    &ast.Field{Type: expr},
+            zeroVal: analysisinternal.ZeroValue(file, pkg, typ),
         })
     }
 }
@@ -1170,7 +1169,7 @@ func adjustReturnStatements(returnTypes []*ast.Field, seenVars map[types.Object]
         if typ != returnType.Type {
             continue
         }
-        val = analysisinternal.ZeroValue(fset, file, pkg, obj.Type())
+        val = analysisinternal.ZeroValue(file, pkg, obj.Type())
         break
     }
     if val == nil {
diff --git a/internal/lsp/source/stub.go b/internal/lsp/source/stub.go
index f09bbab03ef..0d1981795f2 100644
--- a/internal/lsp/source/stub.go
+++ b/internal/lsp/source/stub.go
@@ -19,6 +19,7 @@ import (
     "golang.org/x/tools/go/ast/astutil"
     "golang.org/x/tools/internal/lsp/analysis/stubmethods"
     "golang.org/x/tools/internal/lsp/protocol"
+    "golang.org/x/tools/internal/lsp/safetoken"
     "golang.org/x/tools/internal/span"
     "golang.org/x/tools/internal/typeparams"
 )
@@ -57,8 +58,8 @@ func stubSuggestedFixFunc(ctx context.Context, snapshot Snapshot, fh VersionedFi
     if err != nil {
         return nil, fmt.Errorf("error reading concrete file source: %w", err)
     }
-    insertPos := snapshot.FileSet().Position(nodes[1].End()).Offset
-    if insertPos >= len(concreteSrc) {
+    insertPos, err := safetoken.Offset(parsedConcreteFile.Tok, nodes[1].End())
+    if err != nil || insertPos >= len(concreteSrc) {
         return nil, fmt.Errorf("insertion position is past the end of the file")
     }
     var buf bytes.Buffer
diff --git a/internal/lsp/source/util.go b/internal/lsp/source/util.go
index 06720d7085b..9cb2ee69482 100644
--- a/internal/lsp/source/util.go
+++ b/internal/lsp/source/util.go
@@ -113,15 +113,11 @@ func IsGenerated(ctx context.Context, snapshot Snapshot, uri span.URI) bool {
     if err != nil {
         return false
     }
-    tok := snapshot.FileSet().File(pgf.File.Pos())
-    if tok == nil {
-        return false
-    }
     for _, commentGroup := range pgf.File.Comments {
         for _, comment := range commentGroup.List {
             if matched := generatedRx.MatchString(comment.Text); matched {
                 // Check if comment is at the beginning of the line in source.
-                if pos := tok.Position(comment.Slash); pos.Column == 1 {
+                if pgf.Tok.Position(comment.Slash).Column == 1 {
                     return true
                 }
             }