From adcb14f05deedd8305e4ded8e1709f6eb32244ae Mon Sep 17 00:00:00 2001
From: KaiXinXiaoLei
Date: Thu, 29 Jan 2015 09:36:50 +0800
Subject: [PATCH] change the file.

---
 core/src/main/scala/org/apache/spark/SparkContext.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index f064f8e447abd..1c903e4923853 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1329,7 +1329,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
     val cleanedFunc = clean(func)
     logInfo("Starting job: " + callSite.shortForm)
     if (conf.getBoolean("spark.rddDebug.enable", false)) {
-      logInfo("toDebugString: " + rdd.toDebugString)
+      logInfo("RDD.toDebugString:\n" + rdd.toDebugString)
     }
     dagScheduler.runJob(rdd, cleanedFunc, partitions, callSite, allowLocal, resultHandler,
       localProperties.get)
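
For context, a minimal sketch (not part of the patch) of how the logging path touched here could be exercised. The flag name spark.rddDebug.enable and the new "RDD.toDebugString:\n" prefix come from the diff above; the application name, master, and sample data are made up for illustration. With the flag enabled, any action that reaches SparkContext.runJob logs the RDD lineage produced by rdd.toDebugString.

import org.apache.spark.{SparkConf, SparkContext}

object RddDebugExample {
  def main(args: Array[String]): Unit = {
    // Hypothetical driver program; only the flag name is taken from the patch,
    // everything else (app name, master, data) is illustrative.
    val conf = new SparkConf()
      .setAppName("rdd-debug-example")
      .setMaster("local[2]")
      .set("spark.rddDebug.enable", "true") // defaults to false, so nothing extra is logged otherwise

    val sc = new SparkContext(conf)
    try {
      val counts = sc.parallelize(Seq("a", "b", "a", "c"))
        .map(word => (word, 1))
        .reduceByKey(_ + _)

      // count() is an action, so it goes through SparkContext.runJob, which now logs
      // "RDD.toDebugString:" followed by the multi-line lineage of `counts`.
      counts.count()
    } finally {
      sc.stop()
    }
  }
}

Since toDebugString itself returns a multi-line lineage, the added "\n" in the log message keeps the first lineage line from being glued to the prefix, so the whole lineage reads as an aligned block in the driver log.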