fix #SPARK-26794 SparkSession enableHiveSupport does not point to hive but in-memory while the SparkContext exists
yaooqinn committed Jan 31, 2019
1 parent aeff69b commit 275e998
Showing 2 changed files with 9 additions and 5 deletions.
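
For context, the bug fixed here shows up when a SparkContext already exists before the SparkSession builder runs: enableHiveSupport() only records the catalog choice in the builder's session options and never reaches the pre-existing context's conf. A minimal reproduction sketch (the builder calls and the spark.sql.catalogImplementation key are standard Spark API; the symptom description is paraphrased from SPARK-26794):

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession

// A SparkContext is created first, without spark.sql.catalogImplementation set,
// so sparkContext.conf defaults the catalog implementation to "in-memory".
val sc = new SparkContext(new SparkConf().setMaster("local[*]").setAppName("SPARK-26794-repro"))

// enableHiveSupport() records spark.sql.catalogImplementation=hive only in the
// builder's options (the session's initialSessionOptions); it cannot rewrite the
// conf of the already-running SparkContext.
val spark = SparkSession.builder().enableHiveSupport().getOrCreate()

// Before this commit, SharedState consulted only sparkContext.conf when choosing
// the external catalog, so the session silently used the in-memory catalog
// instead of the Hive metastore.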
SparkSession.scala
@@ -127,7 +127,7 @@ class SparkSession private(
   @Unstable
   @transient
   lazy val sharedState: SharedState = {
-    existingSharedState.getOrElse(new SharedState(sparkContext))
+    existingSharedState.getOrElse(new SharedState(sparkContext, initialSessionOptions.toMap))
   }

   /**
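
To make the data flow concrete, here is a small self-contained model (not Spark's actual classes; builderOptions and SharedStateModel are made up for illustration) of why the builder's options are copied into the shared state: the builder accumulates options in a mutable map, and .toMap snapshots it, mirroring initialSessionOptions.toMap in the hunk above.

import scala.collection.mutable

// The builder accumulates options such as the one set by enableHiveSupport().
val builderOptions = mutable.HashMap("spark.sql.catalogImplementation" -> "hive")

// Stand-in for SharedState: it receives an immutable snapshot of those options.
class SharedStateModel(initConfig: Map[String, String]) {
  val catalogImplementation: String =
    initConfig.getOrElse("spark.sql.catalogImplementation", "in-memory")
}

val sharedStateModel = new SharedStateModel(builderOptions.toMap)
assert(sharedStateModel.catalogImplementation == "hive")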
SharedState.scala
@@ -40,7 +40,8 @@ import org.apache.spark.util.{MutableURLClassLoader, Utils}
 /**
  * A class that holds all state shared across sessions in a given [[SQLContext]].
  */
-private[sql] class SharedState(val sparkContext: SparkContext) extends Logging {
+private[sql] class SharedState(val sparkContext: SparkContext, initConfig: Map[String, String])
+  extends Logging {

   // Load hive-site.xml into hadoopConf and determine the warehouse path we want to use, based on
   // the config from both hive and Spark SQL. Finally set the warehouse config value to sparkConf.
@@ -101,7 +102,7 @@ private[sql] class SharedState(val sparkContext: SparkContext) extends Logging {
    */
   lazy val externalCatalog: ExternalCatalogWithListener = {
     val externalCatalog = SharedState.reflect[ExternalCatalog, SparkConf, Configuration](
-      SharedState.externalCatalogClassName(sparkContext.conf),
+      SharedState.externalCatalogClassName(sparkContext.conf, initConfig),
       sparkContext.conf,
       sparkContext.hadoopConfiguration)

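SharedState.reflect (not shown in this hunk) instantiates the chosen class name reflectively through a (SparkConf, Configuration) constructor. A rough sketch of that idea, assuming plain Class.forName rather than Spark's internal class-loading helper:

import org.apache.hadoop.conf.Configuration
import org.apache.spark.SparkConf

// Look up the class by name and invoke its (SparkConf, Configuration) constructor.
def instantiateByName[T](className: String, conf: SparkConf, hadoopConf: Configuration): T = {
  val ctor = Class.forName(className)
    .getDeclaredConstructor(classOf[SparkConf], classOf[Configuration])
  ctor.newInstance(conf, hadoopConf).asInstanceOf[T]
}
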
@@ -165,8 +166,11 @@ object SharedState extends Logging {

   private val HIVE_EXTERNAL_CATALOG_CLASS_NAME = "org.apache.spark.sql.hive.HiveExternalCatalog"

-  private def externalCatalogClassName(conf: SparkConf): String = {
-    conf.get(CATALOG_IMPLEMENTATION) match {
+  private def externalCatalogClassName(
+      conf: SparkConf,
+      initSessionConfig: Map[String, String]): String = {
+    initSessionConfig
+      .getOrElse(CATALOG_IMPLEMENTATION.key, conf.get(CATALOG_IMPLEMENTATION)) match {
       case "hive" => HIVE_EXTERNAL_CATALOG_CLASS_NAME
       case "in-memory" => classOf[InMemoryCatalog].getCanonicalName
     }
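
The behavioural change is the lookup order: the per-session option now wins, and sparkContext.conf is only the fallback. A tiny illustration with plain maps (the key string is the real one behind CATALOG_IMPLEMENTATION; the values are hypothetical):

// What the builder recorded for this session (e.g. via enableHiveSupport()).
val initSessionConfig = Map("spark.sql.catalogImplementation" -> "hive")
// What the pre-existing SparkContext's conf would report.
val fromSparkConf = "in-memory"

// New resolution order: session option first, SparkConf only as fallback.
val chosen = initSessionConfig.getOrElse("spark.sql.catalogImplementation", fromSparkConf)
assert(chosen == "hive")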
