approximate conventional blaze test console output when `--experimental_use_validation_aspect` is used while leaving test_summary BEP events unchanged

PiperOrigin-RevId: 374245966
kevin1e100 authored and copybara-github committed May 17, 2021
1 parent fdab28c commit f6a822f
Showing 8 changed files with 130 additions and 19 deletions.
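For orientation, here is a condensed, self-contained sketch of the console-only downgrade this commit introduces. Everything in it (the stand-in Status enum, consoleStatus, the example label) is illustrative and not code from the commit; the real logic lives in AggregatingTestListener and TestResultAggregator in the hunks below, and test_summary BEP events are left unchanged.

// Illustrative stand-in, not code from this commit. The real code uses
// TestSummary plus the BlazeTestStatus proto enum from the Bazel source tree.
import java.util.Set;

public class ConsoleDowngradeSketch {
  // Assumed subset of statuses, for illustration only.
  enum Status { NO_STATUS, PASSED, FAILED_TO_BUILD }

  /**
   * @param validatedTargets labels whose ValidateTarget aspect succeeded, or null if the
   *     validation aspect is not enabled
   * @param skipTargetsOnFailure true when -k is absent and remaining targets are skipped,
   *     per the comments in the diff below
   */
  static Status consoleStatus(
      Status aggregated, Set<String> validatedTargets, String label, boolean skipTargetsOnFailure) {
    if (validatedTargets != null
        && aggregated != Status.NO_STATUS
        && !validatedTargets.contains(label)) {
      // The real code merges FAILED_TO_BUILD into the summary via
      // TestResultAggregator.aggregateStatus(), sketched after that hunk below.
      return skipTargetsOnFailure ? Status.NO_STATUS : Status.FAILED_TO_BUILD;
    }
    return aggregated;
  }

  public static void main(String[] args) {
    // A passing test whose validation failed: FAILED_TO_BUILD with -k, NO_STATUS without it.
    System.out.println(consoleStatus(Status.PASSED, Set.of(), "//pkg:some_test", false));
    System.out.println(consoleStatus(Status.PASSED, Set.of(), "//pkg:some_test", true));
  }
}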
@@ -50,7 +50,7 @@
* as --keep_going, --jobs, etc.
*/
public class BuildRequest implements OptionsProvider {
static final String VALIDATION_ASPECT_NAME = "ValidateTarget";
public static final String VALIDATION_ASPECT_NAME = "ValidateTarget";

private static final ImmutableList<Class<? extends OptionsBase>> MANDATORY_OPTIONS =
ImmutableList.of(
@@ -391,7 +391,7 @@ public ImmutableList<String> getAspects() {
}

/** Whether {@value #VALIDATION_ASPECT_NAME} is in use. */
boolean useValidationAspect() {
public boolean useValidationAspect() {
return validationMode() == OutputGroupInfo.ValidationMode.ASPECT;
}

@@ -265,7 +265,6 @@ void setSkippedTargets(Collection<ConfiguredTarget> skippedTargets) {
* Returns the set of targets which were skipped (Blaze didn't attempt to execute them)
* because they're not compatible with the build's target platform.
*/
@VisibleForTesting
public Collection<ConfiguredTarget> getSkippedTargets() {
return skippedTargets;
}
@@ -41,10 +41,12 @@
import com.google.devtools.build.lib.skyframe.ConfiguredTargetKey;
import com.google.devtools.build.lib.util.DetailedExitCode;
import com.google.devtools.build.lib.util.DetailedExitCode.DetailedExitCodeComparator;
import com.google.devtools.build.lib.view.test.TestStatus.BlazeTestStatus;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import javax.annotation.Nullable;

/** Aggregates and reports target-wide test statuses in real-time. */
@ThreadSafety.ThreadSafe
@@ -206,12 +208,14 @@ public void targetComplete(TargetCompleteEvent event) {
* build, run only once.
*
* @param testTargets The list of targets being run
* @param validatedTargets targets whose ValidateTarget aspect succeeded, or null if the aspect is not used
* @param notifier A console notifier to echo results to.
* @return true if all the tests passed, else false
*/
public DetailedExitCode differentialAnalyzeAndReport(
Collection<ConfiguredTarget> testTargets,
Collection<ConfiguredTarget> skippedTargets,
@Nullable ImmutableSet<ConfiguredTargetKey> validatedTargets,
TestResultNotifier notifier) {
Preconditions.checkNotNull(testTargets);
Preconditions.checkNotNull(notifier);
@@ -240,6 +244,27 @@ public DetailedExitCode differentialAnalyzeAndReport(
TestResultAggregator aggregator = aggregators.get(asKey(testTarget));
summary = aggregator.aggregateAndReportSummary(skipTargetsOnFailure);
}

if (validatedTargets != null
&& summary.getStatus() != BlazeTestStatus.NO_STATUS
&& !validatedTargets.contains(asKey(testTarget))) {
// Approximate what targetFailure() would do for test targets that failed validation for
// the purposes of printing test results to console only. Note that absent -k,
// targetFailure() ends up marking one test as FAILED_TO_BUILD before buildComplete() marks
// the remaining targets NO_STATUS. While we could approximate that, for simplicity we
// just use NO_STATUS for all tests with failed validations here (absent -k).
// Events published on BEP are not affected by this, but validation failures are published
// as separate events and are additionally accounted for in TargetSummary BEP messages.
TestSummary.Builder summaryBuilder = TestSummary.newBuilder();
summaryBuilder.mergeFrom(summary);
summaryBuilder.setStatus(
skipTargetsOnFailure
? BlazeTestStatus.NO_STATUS
: TestResultAggregator.aggregateStatus(
summary.getStatus(), BlazeTestStatus.FAILED_TO_BUILD));
summary = summaryBuilder.build();
}

summaries.add(summary);

// Finished aggregating; build the final console output.
@@ -135,7 +135,7 @@ synchronized void targetSkipped() {
postSummary();
}

private static BlazeTestStatus aggregateStatus(BlazeTestStatus status, BlazeTestStatus other) {
static BlazeTestStatus aggregateStatus(BlazeTestStatus status, BlazeTestStatus other) {
return status.getNumber() > other.getNumber() ? status : other;
}

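The visibility widening above exists so AggregatingTestListener can fold FAILED_TO_BUILD into an already-aggregated summary using this precedence rule. A minimal sketch of the rule, assuming (as the comparison implies) that more severe statuses carry larger proto enum numbers; it compiles only with the Bazel source tree on the classpath.

import com.google.devtools.build.lib.view.test.TestStatus.BlazeTestStatus;

class AggregateStatusDemo {
  // Same comparison as TestResultAggregator.aggregateStatus(): the status with
  // the larger proto enum number wins.
  static BlazeTestStatus aggregate(BlazeTestStatus a, BlazeTestStatus b) {
    return a.getNumber() > b.getNumber() ? a : b;
  }

  public static void main(String[] args) {
    // Assuming FAILED_TO_BUILD numbers higher than PASSED, a passing test whose
    // validation failed ends up reported as FAILED_TO_BUILD (with -k).
    System.out.println(aggregate(BlazeTestStatus.PASSED, BlazeTestStatus.FAILED_TO_BUILD));
  }
}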
@@ -257,8 +257,11 @@ static String getAttemptSummary(TestSummary summary) {
}

static String getCacheMessage(TestSummary summary) {
if (summary.getNumCached() == 0 || summary.getStatus() == BlazeTestStatus.INCOMPLETE) {
return "";
if (summary.getNumCached() == 0
|| summary.getStatus() == BlazeTestStatus.INCOMPLETE
|| summary.getStatus() == BlazeTestStatus.NO_STATUS
|| summary.getStatus() == BlazeTestStatus.FAILED_TO_BUILD) {
return ""; // either no caching, or information isn't useful
} else if (summary.getNumCached() == summary.totalRuns()) {
return "(cached) ";
} else {
@@ -268,8 +271,10 @@ static String getCacheMessage(TestSummary summary) {
}

static String getTimeSummary(TestSummary summary) {
if (summary.getTestTimes().isEmpty()) {
return "";
if (summary.getTestTimes().isEmpty()
|| summary.getStatus() == BlazeTestStatus.NO_STATUS
|| summary.getStatus() == BlazeTestStatus.FAILED_TO_BUILD) {
return ""; // either no tests ran, or information isn't useful
} else if (summary.getTestTimes().size() == 1) {
return " in " + timeInSec(summary.getTestTimes().get(0), TimeUnit.MILLISECONDS);
} else {
@@ -37,7 +37,6 @@ java_library(
"//src/main/java/com/google/devtools/build/lib/analysis:config/core_options",
"//src/main/java/com/google/devtools/build/lib/analysis:config/fragment",
"//src/main/java/com/google/devtools/build/lib/analysis:config/fragment_options",
"//src/main/java/com/google/devtools/build/lib/analysis:config/fragment_provider",
"//src/main/java/com/google/devtools/build/lib/analysis:config/invalid_configuration_exception",
"//src/main/java/com/google/devtools/build/lib/analysis:config/run_under",
"//src/main/java/com/google/devtools/build/lib/analysis:configured_target",
@@ -67,7 +66,9 @@ java_library(
"//src/main/java/com/google/devtools/build/lib/runtime/commands/events",
"//src/main/java/com/google/devtools/build/lib/runtime/commands/info",
"//src/main/java/com/google/devtools/build/lib/shell",
"//src/main/java/com/google/devtools/build/lib/skyframe:aspect_value_key",
"//src/main/java/com/google/devtools/build/lib/skyframe:build_configuration_value",
"//src/main/java/com/google/devtools/build/lib/skyframe:configured_target_key",
"//src/main/java/com/google/devtools/build/lib/skyframe:loading_phase_started_event",
"//src/main/java/com/google/devtools/build/lib/skyframe:package_progress_receiver",
"//src/main/java/com/google/devtools/build/lib/skyframe:sky_functions",
@@ -15,6 +15,7 @@
package com.google.devtools.build.lib.runtime.commands;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.config.CoreOptions;
import com.google.devtools.build.lib.buildtool.BuildRequest;
@@ -42,6 +43,8 @@
import com.google.devtools.build.lib.server.FailureDetails;
import com.google.devtools.build.lib.server.FailureDetails.FailureDetail;
import com.google.devtools.build.lib.server.FailureDetails.TestCommand.Code;
import com.google.devtools.build.lib.skyframe.AspectValueKey.AspectKey;
import com.google.devtools.build.lib.skyframe.ConfiguredTargetKey;
import com.google.devtools.build.lib.util.DetailedExitCode;
import com.google.devtools.build.lib.util.io.AnsiTerminalPrinter;
import com.google.devtools.build.lib.vfs.Path;
@@ -194,8 +197,7 @@ private BlazeCommandResult doTest(
}

DetailedExitCode testResults =
analyzeTestResults(
testTargets, buildResult.getSkippedTargets(), testListener, options, env, printer);
analyzeTestResults(request, buildResult, testListener, options, env, printer);

if (testResults.isSuccess() && !buildResult.getSuccess()) {
// If all tests run successfully, test summary should include warning if
@@ -222,17 +224,29 @@
* summarizing those test results.
*/
private static DetailedExitCode analyzeTestResults(
Collection<ConfiguredTarget> testTargets,
Collection<ConfiguredTarget> skippedTargets,
BuildRequest buildRequest,
BuildResult buildResult,
AggregatingTestListener listener,
OptionsParsingResult options,
CommandEnvironment env,
AnsiTerminalPrinter printer) {
ImmutableSet<ConfiguredTargetKey> validatedTargets;
if (buildRequest.useValidationAspect()) {
validatedTargets =
buildResult.getSuccessfulAspects().stream()
.filter(key -> BuildRequest.VALIDATION_ASPECT_NAME.equals(key.getAspectName()))
.map(AspectKey::getBaseConfiguredTargetKey)
.collect(ImmutableSet.toImmutableSet());
} else {
validatedTargets = null;
}

TestResultNotifier notifier = new TerminalTestResultNotifier(
printer,
makeTestLogPathFormatter(options, env),
options);
return listener.differentialAnalyzeAndReport(testTargets, skippedTargets, notifier);
return listener.differentialAnalyzeAndReport(
buildResult.getTestTargets(), buildResult.getSkippedTargets(), validatedTargets, notifier);
}

private static TestLogPathFormatter makeTestLogPathFormatter(
77 changes: 72 additions & 5 deletions src/test/shell/integration/validation_actions_test.sh
@@ -172,6 +172,12 @@ sh_test(
data = [":some_implicit_dep"],
)
sh_test(
name = "test_with_same_validation_in_deps",
srcs = ["test_with_rule_with_validation_in_deps.sh"],
data = [":some_implicit_dep"],
)
genquery(
name = "genquery_with_validation_actions_somewhere",
expression = "deps(//validation_actions:gen)",
@@ -379,10 +385,12 @@ function test_failing_validation_action_for_dep_from_test_fails_build() {
setup_test_project
setup_failing_validation_action

# Validation actions in the the deps of the test should fail the build when
# Validation actions in the deps of the test should fail the build when
# run with bazel test.
bazel test --experimental_run_validations //validation_actions:test_with_rule_with_validation_in_deps >& "$TEST_log" && fail "Expected build to fail"
expect_log "validation failed!"
expect_log "FAILED TO BUILD"
expect_log "out of 1 test: 1 fails to build."
}

function test_slow_failing_validation_action_for_dep_from_test_fails_build() {
@@ -392,12 +400,71 @@ function test_slow_failing_validation_action_for_dep_from_test_fails_build() {
# Validation actions in the deps of the test should fail the build when run
# with "bazel test", even though validation finishes after test
bazel clean # Clean out any previous test or validation outputs
bazel test --experimental_run_validations --experimental_use_validation_aspect \
//validation_actions:test_with_rule_with_validation_in_deps >& "$TEST_log" && fail "Expected build to fail"
bazel test --experimental_run_validations \
--experimental_use_validation_aspect \
//validation_actions:test_with_rule_with_validation_in_deps >& "$TEST_log" \
&& fail "Expected build to fail"
expect_log "validation failed!"
expect_log "Target //validation_actions:test_with_rule_with_validation_in_deps failed to build"
# Potential race: hopefully validation is so slow that test always completes
expect_log "out of 1 test: 1 test passes."
# --experimental_use_validation_aspect reports all affected tests as NO STATUS
# for simplicity, while without that flag one test is randomly reported as
# FAILED TO BUILD (absent -k).
expect_log "NO STATUS"
expect_log "out of 1 test: 1 was skipped."
}

function test_failing_validation_action_fails_multiple_tests() {
setup_test_project
setup_failing_validation_action

# Validation actions in the deps of the test should fail the build when run
# with bazel test.
bazel test --experimental_run_validations \
//validation_actions:test_with_rule_with_validation_in_deps \
//validation_actions:test_with_same_validation_in_deps >& "$TEST_log" \
&& fail "Expected build to fail"
expect_log "validation failed!"
# One test is (randomly) marked as FAILED_TO_BUILD
expect_log_once "FAILED TO BUILD"
expect_log "out of 2 tests: 1 fails to build and 1 was skipped."
}

function test_slow_failing_validation_action_fails_multiple_tests() {
setup_test_project
setup_slow_failing_validation_action

# Validation actions in the deps of the test should fail the build when run
# with "bazel test", even though validation finishes after test
bazel clean # Clean out any previous test or validation outputs
bazel test --experimental_run_validations \
--experimental_use_validation_aspect \
//validation_actions:test_with_rule_with_validation_in_deps \
//validation_actions:test_with_same_validation_in_deps >& "$TEST_log" \
&& fail "Expected build to fail"
expect_log "validation failed!"
# --experimental_use_validation_aspect reports all affected tests as NO STATUS
# for simplicity, while without that flag one test is randomly reported as
# FAILED TO BUILD (absent -k).
expect_log_n "NO STATUS" 2
expect_log "out of 2 tests: 2 were skipped."
}

function test_slow_failing_validation_action_fails_multiple_tests_keep_going() {
setup_test_project
setup_slow_failing_validation_action

# Validation actions in the deps of the test should fail the build when run
# with "bazel test", even though validation finishes after test
bazel clean # Clean out any previous test or validation outputs
bazel test -k --experimental_run_validations \
--experimental_use_validation_aspect \
//validation_actions:test_with_rule_with_validation_in_deps \
//validation_actions:test_with_same_validation_in_deps >& "$TEST_log" \
&& fail "Expected build to fail"
expect_log "validation failed!"
expect_log_n "FAILED TO BUILD" 2
expect_not_log "NO STATUS"
expect_log "out of 2 tests: 2 fail to build."
}

function test_validation_actions_do_not_propagate_through_genquery() {