From 682cfd7d5542a31538d204cef02b32585a2ecb63 Mon Sep 17 00:00:00 2001 From: Alessandro Bellina Date: Fri, 21 Dec 2018 21:25:48 +0000 Subject: [PATCH] SPARK-26285: code review comment fixes --- .../apache/spark/metrics/source/AccumulatorSource.scala | 8 ++++++-- .../apache/spark/examples/AccumulatorMetricsTest.scala | 8 +++++++- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/metrics/source/AccumulatorSource.scala b/core/src/main/scala/org/apache/spark/metrics/source/AccumulatorSource.scala index 48d575c4242ef..45a4d224d45fe 100644 --- a/core/src/main/scala/org/apache/spark/metrics/source/AccumulatorSource.scala +++ b/core/src/main/scala/org/apache/spark/metrics/source/AccumulatorSource.scala @@ -56,7 +56,9 @@ class DoubleAccumulatorSource extends AccumulatorSource /** * :: Experimental :: - * Metrics source specifically for LongAccumulators. + * Metrics source specifically for LongAccumulators. Accumulators + * are only valid on the driver side, so these metrics are reported + * only by the driver. * Register LongAccumulators using: * LongAccumulatorSource.register(sc, {"name" -> longAccumulator}) */ @@ -71,7 +73,9 @@ object LongAccumulatorSource { /** * :: Experimental :: - * Metrics source specifically for DoubleAccumulators. + * Metrics source specifically for DoubleAccumulators. Accumulators + * are only valid on the driver side, so these metrics are reported + * only by the driver. 
* Register DoubleAccumulators using: * DoubleAccumulatorSource.register(sc, {"name" -> doubleAccumulator}) */ diff --git a/examples/src/main/scala/org/apache/spark/examples/AccumulatorMetricsTest.scala b/examples/src/main/scala/org/apache/spark/examples/AccumulatorMetricsTest.scala index 642e723341349..5d9a9a73f12ec 100644 --- a/examples/src/main/scala/org/apache/spark/examples/AccumulatorMetricsTest.scala +++ b/examples/src/main/scala/org/apache/spark/examples/AccumulatorMetricsTest.scala @@ -31,7 +31,9 @@ import org.apache.spark.sql.SparkSession * * The result is output to stdout in the driver with the values of the accumulators. * For the long accumulator, it should equal numElem the double accumulator should be - * roughly 1.1 x numElem (within double precision.) + * roughly 1.1 x numElem (within double precision). This example also sets up a + * ConsoleSink (metrics) instance, and so registered codahale metrics (like the + * accumulator source) are reported to stdout as well. */ object AccumulatorMetricsTest { def main(args: Array[String]) { @@ -45,9 +47,13 @@ object AccumulatorMetricsTest { val sc = spark.sparkContext val acc = sc.longAccumulator("my-long-metric") + // register the accumulator; the metrics system will report it as + // [spark.metrics.namespace].[execId|driver].AccumulatorSource.my-long-metric LongAccumulatorSource.register(sc, List(("my-long-metric" -> acc)).toMap) val acc2 = sc.doubleAccumulator("my-double-metric") + // register the accumulator; the metrics system will report it as + // [spark.metrics.namespace].[execId|driver].AccumulatorSource.my-double-metric DoubleAccumulatorSource.register(sc, List(("my-double-metric" -> acc2)).toMap) val num = if (args.length > 0) args(0).toInt else 1000000