Push down incremental logic from InMemoryMemoizingEvaluator to `AbstractIncrementalInMemoryMemoizingEvaluator`.

PiperOrigin-RevId: 521590383
Change-Id: I3c0f4956119edf8a41c6871095cca9382bfc6322
Googler authored and copybara-github committed Apr 3, 2023
1 parent 4b194d2 commit 1221742
Showing 2 changed files with 161 additions and 111 deletions.
AbstractIncrementalInMemoryMemoizingEvaluator.java (new file)
@@ -0,0 +1,151 @@
// Copyright 2023 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.skyframe;

import static com.google.common.base.Preconditions.checkNotNull;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.collect.nestedset.NestedSetVisitor;
import com.google.devtools.build.skyframe.InvalidatingNodeVisitor.DeletingInvalidationState;
import com.google.devtools.build.skyframe.InvalidatingNodeVisitor.DirtyingInvalidationState;
import com.google.devtools.build.skyframe.InvalidatingNodeVisitor.InvalidationState;
import com.google.devtools.build.skyframe.QueryableGraph.Reason;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

/**
 * Partial implementation of {@link MemoizingEvaluator} with expanded support for incremental and
 * non-incremental evaluations on an {@link InMemoryGraph}.
 */
abstract class AbstractIncrementalInMemoryMemoizingEvaluator
    extends AbstractInMemoryMemoizingEvaluator {

  final ImmutableMap<SkyFunctionName, SkyFunction> skyFunctions;
  final DirtyTrackingProgressReceiver progressReceiver;

  // State related to invalidation and deletion.
  Set<SkyKey> valuesToDelete = new LinkedHashSet<>();
  private Set<SkyKey> valuesToDirty = new LinkedHashSet<>();
  Map<SkyKey, SkyValue> valuesToInject = new HashMap<>();
  private final DeletingInvalidationState deleterState = new DeletingInvalidationState();
  final Differencer differencer;
  final GraphInconsistencyReceiver graphInconsistencyReceiver;
  final EventFilter eventFilter;

  // Keep edges in graph. Can be false to save memory, in which case incremental builds are
  // not possible.
  private final boolean keepEdges;

  // Values that the caller explicitly specified are assumed to be changed -- they will be
  // re-evaluated even if none of their children are changed.
  private final InvalidationState invalidatorState = new DirtyingInvalidationState();

  final NestedSetVisitor.VisitedState emittedEventState;

  AbstractIncrementalInMemoryMemoizingEvaluator(
      ImmutableMap<SkyFunctionName, SkyFunction> skyFunctions,
      Differencer differencer,
      DirtyTrackingProgressReceiver dirtyTrackingProgressReceiver,
      EventFilter eventFilter,
      NestedSetVisitor.VisitedState emittedEventState,
      GraphInconsistencyReceiver graphInconsistencyReceiver,
      boolean keepEdges) {
    this.skyFunctions = checkNotNull(skyFunctions);
    this.differencer = checkNotNull(differencer);
    this.progressReceiver = checkNotNull(dirtyTrackingProgressReceiver);
    this.emittedEventState = checkNotNull(emittedEventState);
    this.eventFilter = checkNotNull(eventFilter);
    this.graphInconsistencyReceiver = checkNotNull(graphInconsistencyReceiver);
    this.keepEdges = keepEdges;
  }

  void invalidate(Iterable<SkyKey> diff) {
    Iterables.addAll(valuesToDirty, diff);
  }

  /**
   * Removes entries in {@code valuesToInject} whose values are equal to the present values in the
   * graph.
   */
  void pruneInjectedValues(Map<SkyKey, SkyValue> valuesToInject) {
    for (Iterator<Entry<SkyKey, SkyValue>> it = valuesToInject.entrySet().iterator();
        it.hasNext(); ) {
      Map.Entry<SkyKey, SkyValue> entry = it.next();
      SkyKey key = entry.getKey();
      SkyValue newValue = entry.getValue();
      NodeEntry prevEntry = getInMemoryGraph().get(null, Reason.OTHER, key);
      if (prevEntry != null && prevEntry.isDone()) {
        if (keepEdges) {
          try {
            if (!prevEntry.hasAtLeastOneDep()) {
              if (newValue.equals(prevEntry.getValue())
                  && !valuesToDirty.contains(key)
                  && !valuesToDelete.contains(key)) {
                it.remove();
              }
            } else {
              // Rare situation of an injected dep that depends on another node. Usually the dep is
              // the error transience node. When working with external repositories, it can also be
              // an external workspace file. Don't bother injecting it, just invalidate it.
              // We'll wastefully evaluate the node freshly during evaluation, but this happens very
              // rarely.
              valuesToDirty.add(key);
              it.remove();
            }
          } catch (InterruptedException e) {
            throw new IllegalStateException(
                "InMemoryGraph does not throw: " + entry + ", " + prevEntry, e);
          }
        } else {
          // No incrementality. Just delete the old value from the graph. The new value is about to
          // be injected.
          getInMemoryGraph().remove(key);
        }
      }
    }
  }

  /** Injects values in {@code valuesToInject} into the graph. */
  void injectValues(IntVersion version) {
    if (valuesToInject.isEmpty()) {
      return;
    }
    try {
      ParallelEvaluator.injectValues(valuesToInject, version, getInMemoryGraph(), progressReceiver);
    } catch (InterruptedException e) {
      throw new IllegalStateException("InMemoryGraph doesn't throw interrupts", e);
    }
    // Start with a new map to avoid bloat since clear() does not downsize the map.
    valuesToInject = new HashMap<>();
  }

  void performInvalidation() throws InterruptedException {
    EagerInvalidator.delete(
        getInMemoryGraph(), valuesToDelete, progressReceiver, deleterState, keepEdges);
    // Note that clearing the valuesToDelete would not do an internal resizing. Therefore, if any
    // build has a large set of dirty values, subsequent operations (even clearing) will be slower.
    // Instead, just start afresh with a new LinkedHashSet.
    valuesToDelete = new LinkedHashSet<>();

    EagerInvalidator.invalidate(
        getInMemoryGraph(), valuesToDirty, progressReceiver, invalidatorState);
    // Ditto.
    valuesToDirty = new LinkedHashSet<>();
  }
}
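
The pruning rule in pruneInjectedValues above is easier to follow in isolation. The following is a minimal, self-contained sketch of the same decision logic using simplified stand-in types (a plain Map as the graph, strings instead of SkyKey/SkyValue, a hypothetical Node record instead of NodeEntry); it models only the keep-edges path of the real method and is not Bazel's API.

import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

// Illustration only: simplified stand-ins for the Skyframe types used by pruneInjectedValues.
final class PruneInjectedValuesSketch {

  // Stand-in for a "done" NodeEntry: a value plus whether the node has any deps.
  record Node(String value, boolean hasDeps) {}

  public static void main(String[] args) {
    Map<String, Node> graph = new HashMap<>();
    graph.put("a", new Node("v1", /* hasDeps= */ false));
    graph.put("b", new Node("v2", /* hasDeps= */ true));

    Map<String, String> valuesToInject = new HashMap<>();
    valuesToInject.put("a", "v1"); // same value already in the graph -> injection is redundant
    valuesToInject.put("b", "v3"); // existing node has deps -> dirty it instead of injecting
    valuesToInject.put("c", "v4"); // not in the graph at all -> keep the injection

    Set<String> valuesToDirty = new LinkedHashSet<>();
    Set<String> valuesToDelete = new LinkedHashSet<>();

    for (Iterator<Map.Entry<String, String>> it = valuesToInject.entrySet().iterator();
        it.hasNext(); ) {
      Map.Entry<String, String> entry = it.next();
      Node prev = graph.get(entry.getKey());
      if (prev == null) {
        continue; // nothing done in the graph yet; inject normally
      }
      if (!prev.hasDeps()) {
        // Equal value and not already scheduled for dirtying/deletion: the injection is a no-op.
        if (entry.getValue().equals(prev.value())
            && !valuesToDirty.contains(entry.getKey())
            && !valuesToDelete.contains(entry.getKey())) {
          it.remove();
        }
      } else {
        // Injected node that has deps: invalidate it and let the next evaluation recompute it.
        valuesToDirty.add(entry.getKey());
        it.remove();
      }
    }

    System.out.println("still injecting: " + valuesToInject.keySet()); // [c]
    System.out.println("to dirty: " + valuesToDirty); // [b]
  }
}

In the real method, when keepEdges is false there is no pruning decision at all: the stale node is simply removed from the graph and the injection proceeds.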
InMemoryMemoizingEvaluator.java
@@ -15,7 +15,6 @@

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.google.devtools.build.lib.collect.nestedset.NestedSetVisitor;
import com.google.devtools.build.lib.concurrent.AbstractQueueVisitor;
@@ -25,14 +24,8 @@
import com.google.devtools.build.lib.profiler.Profiler;
import com.google.devtools.build.lib.profiler.SilentCloseable;
import com.google.devtools.build.skyframe.Differencer.Diff;
import com.google.devtools.build.skyframe.InvalidatingNodeVisitor.DeletingInvalidationState;
import com.google.devtools.build.skyframe.InvalidatingNodeVisitor.DirtyingInvalidationState;
import com.google.devtools.build.skyframe.InvalidatingNodeVisitor.InvalidationState;
import com.google.devtools.build.skyframe.QueryableGraph.Reason;
import java.time.Duration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -47,33 +40,12 @@
 *
 * <p>This memoizing evaluator uses a monotonically increasing {@link IntVersion}.
 */
public final class InMemoryMemoizingEvaluator extends AbstractInMemoryMemoizingEvaluator {

  private final ImmutableMap<SkyFunctionName, SkyFunction> skyFunctions;
  private final DirtyTrackingProgressReceiver progressReceiver;
public final class InMemoryMemoizingEvaluator
    extends AbstractIncrementalInMemoryMemoizingEvaluator {
  // Not final only for testing.
  private InMemoryGraph graph;
  private IntVersion lastGraphVersion = null;

  // State related to invalidation and deletion.
  private Set<SkyKey> valuesToDelete = new LinkedHashSet<>();
  private Set<SkyKey> valuesToDirty = new LinkedHashSet<>();
  private Map<SkyKey, SkyValue> valuesToInject = new HashMap<>();
  private final DeletingInvalidationState deleterState = new DeletingInvalidationState();
  private final Differencer differencer;
  private final GraphInconsistencyReceiver graphInconsistencyReceiver;
  private final EventFilter eventFilter;

  // Keep edges in graph. Can be false to save memory, in which case incremental builds are
  // not possible.
  private final boolean keepEdges;

  // Values that the caller explicitly specified are assumed to be changed -- they will be
  // re-evaluated even if none of their children are changed.
  private final InvalidationState invalidatorState = new DirtyingInvalidationState();

  private final NestedSetVisitor.VisitedState emittedEventState;

  private final AtomicBoolean evaluating = new AtomicBoolean(false);

  public InMemoryMemoizingEvaluator(
@@ -105,21 +77,18 @@ public InMemoryMemoizingEvaluator(
      NestedSetVisitor.VisitedState emittedEventState,
      boolean keepEdges,
      boolean usePooledSkyKeyInterning) {
    this.skyFunctions = ImmutableMap.copyOf(skyFunctions);
    this.differencer = Preconditions.checkNotNull(differencer);
    this.progressReceiver = new DirtyTrackingProgressReceiver(progressReceiver);
    this.graphInconsistencyReceiver = Preconditions.checkNotNull(graphInconsistencyReceiver);
    this.eventFilter = eventFilter;
    super(
        ImmutableMap.copyOf(skyFunctions),
        differencer,
        new DirtyTrackingProgressReceiver(progressReceiver),
        eventFilter,
        emittedEventState,
        graphInconsistencyReceiver,
        keepEdges);
    this.graph =
        keepEdges
            ? InMemoryGraph.create(usePooledSkyKeyInterning)
            : InMemoryGraph.createEdgeless(usePooledSkyKeyInterning);
    this.emittedEventState = emittedEventState;
    this.keepEdges = keepEdges;
  }

  private void invalidate(Iterable<SkyKey> diff) {
    Iterables.addAll(valuesToDirty, diff);
  }

  private static final Duration MIN_TIME_TO_LOG_DELETION = Duration.ofMillis(10);
@@ -219,76 +188,6 @@ public <T extends SkyValue> EvaluationResult<T> evaluate(
    }
  }

  /**
   * Removes entries in {@code valuesToInject} whose values are equal to the present values in the
   * graph.
   */
  private void pruneInjectedValues(Map<SkyKey, SkyValue> valuesToInject) {
    for (Iterator<Map.Entry<SkyKey, SkyValue>> it = valuesToInject.entrySet().iterator();
        it.hasNext(); ) {
      Map.Entry<SkyKey, SkyValue> entry = it.next();
      SkyKey key = entry.getKey();
      SkyValue newValue = entry.getValue();
      NodeEntry prevEntry = graph.get(null, Reason.OTHER, key);
      if (prevEntry != null && prevEntry.isDone()) {
        if (keepEdges) {
          try {
            if (!prevEntry.hasAtLeastOneDep()) {
              if (newValue.equals(prevEntry.getValue())
                  && !valuesToDirty.contains(key)
                  && !valuesToDelete.contains(key)) {
                it.remove();
              }
            } else {
              // Rare situation of an injected dep that depends on another node. Usually the dep is
              // the error transience node. When working with external repositories, it can also be
              // an external workspace file. Don't bother injecting it, just invalidate it.
              // We'll wastefully evaluate the node freshly during evaluation, but this happens very
              // rarely.
              valuesToDirty.add(key);
              it.remove();
            }
          } catch (InterruptedException e) {
            throw new IllegalStateException(
                "InMemoryGraph does not throw: " + entry + ", " + prevEntry, e);
          }
        } else {
          // No incrementality. Just delete the old value from the graph. The new value is about to
          // be injected.
          graph.remove(key);
        }
      }
    }
  }

  /**
   * Injects values in {@code valuesToInject} into the graph.
   */
  private void injectValues(IntVersion version) {
    if (valuesToInject.isEmpty()) {
      return;
    }
    try {
      ParallelEvaluator.injectValues(valuesToInject, version, graph, progressReceiver);
    } catch (InterruptedException e) {
      throw new IllegalStateException("InMemoryGraph doesn't throw interrupts", e);
    }
    // Start with a new map to avoid bloat since clear() does not downsize the map.
    valuesToInject = new HashMap<>();
  }

  private void performInvalidation() throws InterruptedException {
    EagerInvalidator.delete(graph, valuesToDelete, progressReceiver, deleterState, keepEdges);
    // Note that clearing the valuesToDelete would not do an internal resizing. Therefore, if any
    // build has a large set of dirty values, subsequent operations (even clearing) will be slower.
    // Instead, just start afresh with a new LinkedHashSet.
    valuesToDelete = new LinkedHashSet<>();

    EagerInvalidator.invalidate(graph, valuesToDirty, progressReceiver, invalidatorState);
    // Ditto.
    valuesToDirty = new LinkedHashSet<>();
  }

  private void setAndCheckEvaluateState(boolean newValue, Object requestInfo) {
    Preconditions.checkState(evaluating.getAndSet(newValue) != newValue,
        "Re-entrant evaluation for request: %s", requestInfo);
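
The shape of this refactor is easier to read outside Skyframe's full type surface. Below is a minimal sketch of the delegation pattern the diff introduces, using hypothetical names (AbstractIncrementalEvaluator, InMemoryEvaluator, string keys) rather than Bazel's classes: the shared invalidation state and helpers live in an abstract base, and the concrete evaluator's constructor reduces to a super(...) call plus its own graph wiring.

import java.util.LinkedHashSet;
import java.util.Set;

// Illustration only: the shape of the refactor, not Bazel's actual classes.
class RefactorShapeSketch {

  // Base class that now owns the shared invalidation state and helpers.
  abstract static class AbstractIncrementalEvaluator {
    final boolean keepEdges;
    Set<String> valuesToDirty = new LinkedHashSet<>();

    AbstractIncrementalEvaluator(boolean keepEdges) {
      this.keepEdges = keepEdges;
    }

    void invalidate(Iterable<String> diff) {
      diff.forEach(valuesToDirty::add);
    }

    void performInvalidation() {
      System.out.println("invalidating " + valuesToDirty + " (keepEdges=" + keepEdges + ")");
      // Start afresh instead of clear(): clearing keeps the set's internal capacity, so one huge
      // invalidation would leave later operations on the set slower than necessary.
      valuesToDirty = new LinkedHashSet<>();
    }
  }

  // Concrete evaluator: after the push-down it only wires its own state and delegates to super().
  static final class InMemoryEvaluator extends AbstractIncrementalEvaluator {
    InMemoryEvaluator(boolean keepEdges) {
      super(keepEdges); // previously the concrete class assigned the shared fields itself
    }
  }

  public static void main(String[] args) {
    InMemoryEvaluator evaluator = new InMemoryEvaluator(/* keepEdges= */ true);
    evaluator.invalidate(Set.of("//pkg:target"));
    evaluator.performInvalidation();
  }
}

Judging by the new class's javadoc, the point of the intermediate AbstractIncrementalInMemoryMemoizingEvaluator is to keep this machinery in one place for graph-based evaluators rather than re-implementing it in each concrete evaluator.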
