diff --git a/docs/configuration.md b/docs/configuration.md
index 780fc94908d38..0017219e07261 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -767,7 +767,7 @@ Apart from these, the following properties are also available, and may be useful
 </tr>
 <tr>
   <td><code>spark.kryo.referenceTracking</code></td>
-  <td>true (false when using Spark SQL Thrift Server)</td>
+  <td>true</td>
   <td>
     Whether to track references to the same object when serializing data with Kryo, which is
     necessary if your object graphs have loops and useful for efficiency if they contain multiple
@@ -838,8 +838,7 @@ Apart from these, the following properties are also available, and may be useful
 <tr>
   <td><code>spark.serializer</code></td>
   <td>
-    org.apache.spark.serializer.<br />JavaSerializer (org.apache.spark.serializer.<br />
-    KryoSerializer when using Spark SQL Thrift Server)
+    org.apache.spark.serializer.<br />JavaSerializer
   </td>
   <td>
     Class to use for serializing objects that will be sent over the network or need to be cached
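
The documentation rows above now describe only the generic defaults, so a Thrift Server deployment that relied on the old Kryo behaviour has to opt in explicitly. A minimal sketch of doing that on a plain `SparkConf` (the app name and local master are illustrative, not part of this patch):

```scala
import org.apache.spark.{SparkConf, SparkContext}

// Opt in to Kryo explicitly; after this change it is no longer applied
// automatically when the Thrift Server starts.
val conf = new SparkConf()
  .setMaster("local[*]")             // local master, only for this sketch
  .setAppName("kryo-opt-in-example") // illustrative name
  .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
  // Disabling reference tracking is safe only when the serialized object
  // graphs contain no cycles and no shared references that must be preserved.
  .set("spark.kryo.referenceTracking", "false")

val sc = new SparkContext(conf)
```
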
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
index 638911599aad3..78a309497ab57 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
@@ -19,8 +19,6 @@ package org.apache.spark.sql.hive.thriftserver
import java.io.PrintStream
-import scala.collection.JavaConverters._
-
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{SparkSession, SQLContext}
@@ -37,8 +35,6 @@ private[hive] object SparkSQLEnv extends Logging {
def init() {
if (sqlContext == null) {
val sparkConf = new SparkConf(loadDefaults = true)
- val maybeSerializer = sparkConf.getOption("spark.serializer")
- val maybeKryoReferenceTracking = sparkConf.getOption("spark.kryo.referenceTracking")
// If user doesn't specify the appName, we want to get [SparkSQL::localHostName] instead of
// the default appName [SparkSQLCLIDriver] in cli or beeline.
val maybeAppName = sparkConf
@@ -47,12 +43,6 @@ private[hive] object SparkSQLEnv extends Logging {
sparkConf
.setAppName(maybeAppName.getOrElse(s"SparkSQL::${Utils.localHostName()}"))
- .set(
- "spark.serializer",
- maybeSerializer.getOrElse("org.apache.spark.serializer.KryoSerializer"))
- .set(
- "spark.kryo.referenceTracking",
- maybeKryoReferenceTracking.getOrElse("false"))
val sparkSession = SparkSession.builder.config(sparkConf).enableHiveSupport().getOrCreate()
sparkContext = sparkSession.sparkContext
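
With the overrides above removed, `SparkSQLEnv.init()` simply passes the loaded `SparkConf` to the session builder, so whatever the user configured (for example in spark-defaults.conf, picked up via `loadDefaults = true`) wins. A user-side sketch of restoring the previous defaults when building a Hive-enabled session; the app name is hypothetical, while the two property keys are the same ones the deleted code set:

```scala
import org.apache.spark.sql.SparkSession

// Supply the settings the removed code used to inject, now chosen explicitly.
// enableHiveSupport() assumes Hive classes are on the classpath, as they are
// in a Thrift Server deployment.
val spark = SparkSession.builder()
  .master("local[*]")                // local master, only for this sketch
  .appName("SparkSQL::example-host") // hypothetical app name
  .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
  .config("spark.kryo.referenceTracking", "false")
  .enableHiveSupport()
  .getOrCreate()
```
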