From 081a723c3e5604f161d6440a105b44cc2350480d Mon Sep 17 00:00:00 2001
From: Yaron Weinsberg
Date: Thu, 24 Dec 2015 05:46:39 -0800
Subject: [PATCH] Add unit test for setting the default RDD name to its path (SPARK-12517)

This change extends SparkContextSuite to verify that RDDs created from file
paths have their default name set to that path.
---
 .../scala/org/apache/spark/SparkContextSuite.scala | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index d4f2ea87650a9..e0d62cf468b53 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -274,6 +274,19 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext {
     }
   }
 
+  test("Default path for file based RDDs is properly set (SPARK-12517)") {
+    sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
+
+    // Test textFile, wholeTextFiles, binaryFiles, hadoopFile and newAPIHadoopFile default paths
+    val mockPath = "default/path/for/"
+    assert(sc.textFile(mockPath + "textFile").name == mockPath + "textFile")
+    assert(sc.wholeTextFiles(mockPath + "wholeTextFile").name == mockPath + "wholeTextFile")
+    assert(sc.binaryFiles(mockPath + "binaryFiles").name == mockPath + "binaryFiles")
+    assert(sc.hadoopFile(mockPath + "hadoopFile").name == mockPath + "hadoopFile")
+    assert(sc.newAPIHadoopFile(mockPath + "newAPIHadoopFile").name == mockPath + "newAPIHadoopFile")
+    sc.stop()
+  }
+
   test("calling multiple sc.stop() must not throw any exception") {
     noException should be thrownBy {
       sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
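
For context, a minimal usage sketch (not part of the patch) of the behavior this test
pins down: with SPARK-12517 in place, the file-based RDD factory methods are expected to
use the input path as the RDD's default name. The object name and path below are
illustrative only.

    import org.apache.spark.{SparkConf, SparkContext}

    object RddDefaultNameSketch {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setAppName("rdd-name-sketch").setMaster("local"))
        // textFile builds the RDD lazily, so the path does not need to exist
        // just to inspect the RDD's name.
        val rdd = sc.textFile("default/path/for/textFile")
        // With the default-name behavior verified by the test above,
        // the name should equal the path that was passed in.
        assert(rdd.name == "default/path/for/textFile")
        sc.stop()
      }
    }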