
SPARK-26285: code review comment fixes
Alessandro Bellina committed Dec 21, 2018
1 parent 7f60730 commit 682cfd7
Showing 2 changed files with 13 additions and 3 deletions.
@@ -56,7 +56,9 @@ class DoubleAccumulatorSource extends AccumulatorSource
 
 /**
  * :: Experimental ::
- * Metrics source specifically for LongAccumulators.
+ * Metrics source specifically for LongAccumulators. Accumulators
+ * are only valid on the driver side, so these metrics are reported
+ * only by the driver.
  * Register LongAccumulators using:
  * LongAccumulatorSource.register(sc, {"name" -> longAccumulator})
  */
@@ -71,7 +73,9 @@ object LongAccumulatorSource {
 
 /**
  * :: Experimental ::
- * Metrics source specifically for DoubleAccumulators.
+ * Metrics source specifically for DoubleAccumulators. Accumulators
+ * are only valid on the driver side, so these metrics are reported
+ * only by the driver.
  * Register DoubleAccumulators using:
  * DoubleAccumulatorSource.register(sc, {"name" -> doubleAccumulator})
  */
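The register calls documented in the scaladoc above take the SparkContext plus a map from metric name to accumulator. Below is a minimal driver-side sketch of wiring up both sources; the import path (org.apache.spark.metrics.source) and the metric names are illustrative assumptions, not taken from this commit.

```scala
import org.apache.spark.metrics.source.{DoubleAccumulatorSource, LongAccumulatorSource}
import org.apache.spark.sql.SparkSession

object AccumulatorSourceSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("AccumulatorSourceSketch").getOrCreate()
    val sc = spark.sparkContext

    // Accumulator values are merged on the driver, so the gauges
    // registered below are reported by the driver process only.
    val rowsSeen = sc.longAccumulator("rows-seen")
    LongAccumulatorSource.register(sc, Map("rows-seen" -> rowsSeen))

    val sampleRatio = sc.doubleAccumulator("sample-ratio")
    DoubleAccumulatorSource.register(sc, Map("sample-ratio" -> sampleRatio))

    // Updates made inside tasks flow back to the driver as usual.
    sc.parallelize(1 to 100).foreach { _ =>
      rowsSeen.add(1)
      sampleRatio.add(0.01)
    }

    spark.stop()
  }
}
```

Executors still contribute through normal accumulator updates; only the reporting of the merged value happens on the driver, matching the scaladoc added above.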
@@ -31,7 +31,9 @@ import org.apache.spark.sql.SparkSession
  *
  * The result is output to stdout in the driver with the values of the accumulators.
  * For the long accumulator, it should equal numElem the double accumulator should be
- * roughly 1.1 x numElem (within double precision.)
+ * roughly 1.1 x numElem (within double precision.) This example also sets up a
+ * ConsoleSink (metrics) instance, and so registered codahale metrics (like the
+ * accumulator source) are reported to stdout as well.
  */
 object AccumulatorMetricsTest {
   def main(args: Array[String]) {
@@ -45,9 +47,13 @@ object AccumulatorMetricsTest {
     val sc = spark.sparkContext
 
     val acc = sc.longAccumulator("my-long-metric")
+    // register the accumulator, the metric system will report as
+    // [spark.metrics.namespace].[execId|driver].AccumulatorSource.my-long-metric
     LongAccumulatorSource.register(sc, List(("my-long-metric" -> acc)).toMap)
 
     val acc2 = sc.doubleAccumulator("my-double-metric")
+    // register the accumulator, the metric system will report as
+    // [spark.metrics.namespace].[execId|driver].AccumulatorSource.my-double-metric
     DoubleAccumulatorSource.register(sc, List(("my-double-metric" -> acc2)).toMap)
 
     val num = if (args.length > 0) args(0).toInt else 1000000
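The example's scaladoc mentions a ConsoleSink. One way to enable it is through Spark's metrics configuration file; a minimal conf/metrics.properties sketch follows, where the period and unit values are arbitrary choices, not part of this commit.

```properties
# Dump all registered codahale metrics, including the accumulator
# sources registered above, to stdout every 10 seconds.
*.sink.console.class=org.apache.spark.metrics.sink.ConsoleSink
*.sink.console.period=10
*.sink.console.unit=seconds
```

With a sink enabled, the driver output should include gauge names following the pattern noted in the inline comments, e.g. [spark.metrics.namespace].driver.AccumulatorSource.my-long-metric.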
