Skip to content

Commit

Permalink
remove test case
Browse files — browse the repository at this point in the history
  • Loading branch information
rxin committed Apr 7, 2016
1 parent 8719c26 commit c5de86b
Showing 1 changed file with 0 additions and 34 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -668,40 +668,6 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
df.write.format(dataSourceName).partitionBy("c", "d", "e").saveAsTable("t")
}
}

// Regression test for SPARK-9899: when task speculation is enabled, the
// user-configured output committer must be disabled (see the assertion
// comment below) — this test installs a committer that always fails, so a
// successful write proves the custom committer was never used.
// NOTE(review): indentation was flattened by the page scrape; code tokens
// are preserved byte-for-byte from the diff.
test("SPARK-9899 Disable customized output committer when speculation is on") {
// Snapshot the Hadoop configuration and the original speculation flag so
// both can be restored in the finally block below.
val clonedConf = new Configuration(hadoopConfiguration)
val speculationEnabled =
sqlContext.sparkContext.conf.getBoolean("spark.speculation", defaultValue = false)

try {
withTempPath { dir =>
// Enables task speculation
sqlContext.sparkContext.conf.set("spark.speculation", "true")

// Uses a customized output committer which always fails
hadoopConfiguration.set(
SQLConf.OUTPUT_COMMITTER_CLASS.key,
classOf[AlwaysFailOutputCommitter].getName)

// Code below shouldn't throw since customized output committer should be disabled.
val df = sqlContext.range(10).toDF().coalesce(1)
df.write.format(dataSourceName).save(dir.getCanonicalPath)
// Round-trip: read the written data back and verify it matches `df`.
// `dataSchema` is passed explicitly because the data-source-under-test
// is parameterized by `dataSourceName`.
checkAnswer(
sqlContext
.read
.format(dataSourceName)
.option("dataSchema", df.schema.json)
.load(dir.getCanonicalPath),
df)
}
} finally {
// Hadoop 1 doesn't have `Configuration.unset`
// So: wipe the live configuration and replay every entry from the
// snapshot taken above, then restore the original speculation setting.
hadoopConfiguration.clear()
clonedConf.asScala.foreach(entry => hadoopConfiguration.set(entry.getKey, entry.getValue))
sqlContext.sparkContext.conf.set("spark.speculation", speculationEnabled.toString)
}
}
}

// This class is used to test SPARK-8578. We should not use any custom output committer when
Expand Down

0 comments on commit c5de86b

Please sign in to comment.