reduce 1.5x memory usage on large repos on repeated runs #764

Merged: 1 commit, Oct 1, 2019
4 changes: 2 additions & 2 deletions pkg/commands/run.go
@@ -290,8 +290,8 @@ func (e *Executor) runAnalysis(ctx context.Context, args []string) ([]result.Iss
}
lintCtx.Log = e.log.Child("linters context")

runner, err := lint.NewRunner(lintCtx.ASTCache, e.cfg, e.log.Child("runner"),
e.goenv, e.lineCache, e.DBManager)
runner, err := lint.NewRunner(e.cfg, e.log.Child("runner"),
e.goenv, e.lineCache, e.DBManager, lintCtx.Packages)
if err != nil {
return nil, err
}
2 changes: 1 addition & 1 deletion pkg/golinters/goanalysis/linter.go
@@ -189,7 +189,7 @@ func buildIssuesFromErrorsForTypecheckMode(errs []error, lintCtx *linter.Context
if !ok {
return nil, err
}
for _, err := range libpackages.ExtractErrors(itErr.Pkg, lintCtx.ASTCache) {
for _, err := range libpackages.ExtractErrors(itErr.Pkg) {
i, perr := parseError(err)
if perr != nil { // failed to parse
if uniqReportedIssues[err.Msg] {
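Both call sites above drop lintCtx.ASTCache: NewRunner now takes the already loaded lintCtx.Packages, and ExtractErrors works from itErr.Pkg alone. As a rough, hypothetical sketch of why the loaded packages are enough (describeErrors and the Load call below are illustrative, not golangci-lint code), a *packages.Package from go/packages already carries the positions and messages needed to report its errors, so no second parsed-AST cache has to stay resident across repeated runs:

// Hedged sketch: relies only on golang.org/x/tools/go/packages; the
// describeErrors helper is hypothetical and not part of golangci-lint.
package main

import (
	"fmt"

	"golang.org/x/tools/go/packages"
)

// describeErrors reports a package's load/type errors using only data the
// package already carries (Errors with "file:line:col" positions).
func describeErrors(pkg *packages.Package) []string {
	msgs := make([]string, 0, len(pkg.Errors))
	for _, e := range pkg.Errors {
		msgs = append(msgs, fmt.Sprintf("%s: %s", e.Pos, e.Msg))
	}
	return msgs
}

func main() {
	pkgs, err := packages.Load(&packages.Config{Mode: packages.LoadSyntax}, "./...")
	if err != nil {
		panic(err)
	}
	for _, pkg := range pkgs {
		for _, msg := range describeErrors(pkg) {
			fmt.Println(msg)
		}
	}
}
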
186 changes: 127 additions & 59 deletions pkg/golinters/goanalysis/runner.go
@@ -108,6 +108,113 @@ func (r *runner) run(analyzers []*analysis.Analyzer, initialPackages []*packages
return extractDiagnostics(roots)
}

type actKey struct {
*analysis.Analyzer
*packages.Package
}

func (r *runner) markAllActions(a *analysis.Analyzer, pkg *packages.Package, markedActions map[actKey]struct{}) {
k := actKey{a, pkg}
if _, ok := markedActions[k]; ok {
return
}

for _, req := range a.Requires {
r.markAllActions(req, pkg, markedActions)
}

if len(a.FactTypes) != 0 {
for path := range pkg.Imports {
r.markAllActions(a, pkg.Imports[path], markedActions)
}
}

markedActions[k] = struct{}{}
}

func (r *runner) makeAction(a *analysis.Analyzer, pkg *packages.Package,
initialPkgs map[*packages.Package]bool, actions map[actKey]*action, actAlloc *actionAllocator) *action {
k := actKey{a, pkg}
act, ok := actions[k]
if ok {
return act
}

act = actAlloc.alloc()
act.a = a
act.pkg = pkg
act.log = r.log
act.prefix = r.prefix
act.pkgCache = r.pkgCache
act.isInitialPkg = initialPkgs[pkg]
act.needAnalyzeSource = initialPkgs[pkg]
act.analysisDoneCh = make(chan struct{})

depsCount := len(a.Requires)
if len(a.FactTypes) > 0 {
depsCount += len(pkg.Imports)
}
act.deps = make([]*action, 0, depsCount)

// Add a dependency on each required analyzer.
for _, req := range a.Requires {
act.deps = append(act.deps, r.makeAction(req, pkg, initialPkgs, actions, actAlloc))
}

r.buildActionFactDeps(act, a, pkg, initialPkgs, actions, actAlloc)

actions[k] = act
return act
}

func (r *runner) buildActionFactDeps(act *action, a *analysis.Analyzer, pkg *packages.Package,
initialPkgs map[*packages.Package]bool, actions map[actKey]*action, actAlloc *actionAllocator) {
// An analysis that consumes/produces facts
// must run on the package's dependencies too.
if len(a.FactTypes) == 0 {
return
}

act.objectFacts = make(map[objectFactKey]analysis.Fact)
act.packageFacts = make(map[packageFactKey]analysis.Fact)

paths := make([]string, 0, len(pkg.Imports))
for path := range pkg.Imports {
paths = append(paths, path)
}
sort.Strings(paths) // for determinism
for _, path := range paths {
dep := r.makeAction(a, pkg.Imports[path], initialPkgs, actions, actAlloc)
act.deps = append(act.deps, dep)
}

// Register fact types so the pkgcache can gob-encode facts properly.
for _, f := range a.FactTypes {
gob.Register(f)
}
}

type actionAllocator struct {
allocatedActions []action
nextFreeIndex int
}

func newActionAllocator(maxCount int) *actionAllocator {
return &actionAllocator{
allocatedActions: make([]action, maxCount),
nextFreeIndex: 0,
}
}

func (actAlloc *actionAllocator) alloc() *action {
if actAlloc.nextFreeIndex == len(actAlloc.allocatedActions) {
panic(fmt.Sprintf("Made too many allocations of actions: %d allowed", len(actAlloc.allocatedActions)))
}
act := &actAlloc.allocatedActions[actAlloc.nextFreeIndex]
actAlloc.nextFreeIndex++
return act
}

//nolint:gocritic
func (r *runner) prepareAnalysis(pkgs []*packages.Package,
analyzers []*analysis.Analyzer) (map[*packages.Package]bool, []*action, []*action) {
@@ -116,70 +223,30 @@ func (r *runner) prepareAnalysis(pkgs []*packages.Package,
// Each graph node (action) is one unit of analysis.
// Edges express package-to-package (vertical) dependencies,
// and analysis-to-analysis (horizontal) dependencies.
type key struct {
*analysis.Analyzer
*packages.Package
}
actions := make(map[key]*action)

initialPkgs := map[*packages.Package]bool{}
for _, pkg := range pkgs {
initialPkgs[pkg] = true
// This place is memory-intensive: e.g. Istio project has 120k total actions.
// Therefore optimize it carefully.
markedActions := make(map[actKey]struct{}, len(analyzers)*len(pkgs))
for _, a := range analyzers {
for _, pkg := range pkgs {
r.markAllActions(a, pkg, markedActions)
}
}
totalActionsCount := len(markedActions)

var mkAction func(a *analysis.Analyzer, pkg *packages.Package) *action
mkAction = func(a *analysis.Analyzer, pkg *packages.Package) *action {
k := key{a, pkg}
act, ok := actions[k]
if !ok {
act = &action{
a: a,
pkg: pkg,
log: r.log,
prefix: r.prefix,
pkgCache: r.pkgCache,
isInitialPkg: initialPkgs[pkg],
needAnalyzeSource: initialPkgs[pkg],
analysisDoneCh: make(chan struct{}),
objectFacts: make(map[objectFactKey]analysis.Fact),
packageFacts: make(map[packageFactKey]analysis.Fact),
loadMode: r.loadMode,
}

// Add a dependency on each required analyzers.
for _, req := range a.Requires {
act.deps = append(act.deps, mkAction(req, pkg))
}
actions := make(map[actKey]*action, totalActionsCount)
actAlloc := newActionAllocator(totalActionsCount)

// An analysis that consumes/produces facts
// must run on the package's dependencies too.
if len(a.FactTypes) > 0 {
paths := make([]string, 0, len(pkg.Imports))
for path := range pkg.Imports {
paths = append(paths, path)
}
sort.Strings(paths) // for determinism
for _, path := range paths {
dep := mkAction(a, pkg.Imports[path])
act.deps = append(act.deps, dep)
}

// Need to register fact types for pkgcache proper gob encoding.
for _, f := range a.FactTypes {
gob.Register(f)
}
}

actions[k] = act
}
return act
initialPkgs := make(map[*packages.Package]bool, len(pkgs))
for _, pkg := range pkgs {
initialPkgs[pkg] = true
}

// Build nodes for initial packages.
var roots []*action
roots := make([]*action, 0, len(pkgs)*len(analyzers))
for _, a := range analyzers {
for _, pkg := range pkgs {
root := mkAction(a, pkg)
root := r.makeAction(a, pkg, initialPkgs, actions, actAlloc)
root.isroot = true
roots = append(roots, root)
}
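The rewritten prepareAnalysis body above replaces grow-as-you-go allocation with a count-then-presize scheme: markAllActions first walks the analyzer/package graph only to count distinct actions, and totalActionsCount is then used to size the actions map, the roots slice, and the actionAllocator exactly once, instead of letting a map and slices rehash and reallocate their way up to ~120k entries. A small generic sketch of the same idea, with illustrative names that are not taken from the PR:

// Sketch: count work items up front so every container is allocated once.
package main

import "fmt"

type item struct{ analyzer, pkg string }

func main() {
	analyzers := []string{"govet", "staticcheck"}
	pkgs := []string{"a", "b", "c"}

	// Pass 1: count the distinct (analyzer, package) pairs.
	seen := make(map[item]struct{}, len(analyzers)*len(pkgs))
	for _, a := range analyzers {
		for _, p := range pkgs {
			seen[item{a, p}] = struct{}{}
		}
	}
	total := len(seen)

	// Pass 2: build the real structures with exact capacity, so the map
	// never rehashes and the slice never reallocates while being filled.
	actions := make(map[item]int, total)
	order := make([]item, 0, total)
	for k := range seen {
		actions[k] = len(order)
		order = append(order, k)
	}
	fmt.Printf("built %d actions\n", len(actions))
}
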
@@ -190,6 +257,8 @@ func (r *runner) prepareAnalysis(pkgs []*packages.Package,
allActions = append(allActions, act)
}

debugf("Built %d actions", len(actions))

return initialPkgs, allActions, roots
}
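totalActionsCount also feeds the actionAllocator shown earlier: every action lives in one contiguous backing slice allocated up front, and alloc() hands out pointers into it, so building a graph like Istio's ~120k actions costs one large allocation instead of one small heap object per action. The old mkAction closure also allocated the objectFacts and packageFacts maps for every action; in the new code they are created only in buildActionFactDeps, that is, for analyzers that actually declare FactTypes. A minimal generic sketch of the slab pattern, with illustrative names:

// Sketch: a fixed-capacity slab allocator, as a generic stand-in for
// actionAllocator. All nodes share one backing array.
package main

import "fmt"

type node struct {
	name string
	deps []*node
}

type slab struct {
	backing []node // one allocation holds every node
	next    int
}

func newSlab(capacity int) *slab {
	return &slab{backing: make([]node, capacity)}
}

func (s *slab) alloc() *node {
	if s.next == len(s.backing) {
		panic(fmt.Sprintf("slab exhausted: %d nodes allowed", len(s.backing)))
	}
	n := &s.backing[s.next] // pointer into the shared slice, not a new heap object
	s.next++
	return n
}

func main() {
	s := newSlab(3)
	root := s.alloc()
	root.name = "root"
	child := s.alloc()
	child.name = "child"
	root.deps = append(root.deps, child)
	fmt.Println(root.name, "->", root.deps[0].name)
}
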

@@ -334,9 +403,6 @@ type action struct {
a *analysis.Analyzer
pkg *packages.Package
pass *analysis.Pass
isroot bool
isInitialPkg bool
needAnalyzeSource bool
deps []*action
objectFacts map[objectFactKey]analysis.Fact
packageFacts map[packageFactKey]analysis.Fact
@@ -349,7 +415,9 @@ type action struct {
analysisDoneCh chan struct{}
loadCachedFactsDone bool
loadCachedFactsOk bool
loadMode LoadMode
isroot bool
isInitialPkg bool
needAnalyzeSource bool
}

type objectFactKey struct {
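The last two hunks reorder the action struct: the bool flags isroot, isInitialPkg and needAnalyzeSource move from the middle of the struct to the end, next to the other bool fields, and the per-action loadMode field is dropped from the listed fields. Grouping 1-byte fields together lets them share one padded word instead of each forcing alignment padding before the next pointer-sized field, which matters when ~120k actions are alive at once. A hedged illustration of the padding effect, using made-up structs rather than the real action layout:

// Sketch: identical fields, different order, different size due to padding.
package main

import (
	"fmt"
	"unsafe"
)

type scattered struct {
	p1 *int // 8 bytes
	b1 bool // 1 byte + 7 bytes padding before the next pointer
	p2 *int // 8 bytes
	b2 bool // 1 byte + 7 bytes padding
	p3 *int // 8 bytes
	b3 bool // 1 byte + 7 bytes trailing padding
}

type grouped struct {
	p1 *int // pointers first...
	p2 *int
	p3 *int
	b1 bool // ...then the bools pack into one word
	b2 bool
	b3 bool
}

func main() {
	fmt.Println(unsafe.Sizeof(scattered{})) // 48 on 64-bit platforms
	fmt.Println(unsafe.Sizeof(grouped{}))   // 32 on 64-bit platforms
}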