Skip to content

Commit

Permalink
Set CASE_SENSITIVE to false in HiveContext
Browse files Browse the repository at this point in the history
  • Loading branch information
scwf committed Apr 30, 2015
1 parent fd30e25 commit 966e719
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 6 deletions.
5 changes: 3 additions & 2 deletions sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
Original file line number Diff line number Diff line change
Expand Up @@ -17,12 +17,12 @@

package org.apache.spark.sql

import org.apache.spark.sql.catalyst.CatalystConf
import java.util.Properties

import scala.collection.immutable
import scala.collection.JavaConversions._

import java.util.Properties
import org.apache.spark.sql.catalyst.CatalystConf

private[spark] object SQLConf {
val COMPRESS_CACHED = "spark.sql.inMemoryColumnarStorage.compressed"
Expand Down Expand Up @@ -266,3 +266,4 @@ private[sql] class SQLConf extends Serializable with CatalystConf {
settings.clear()
}
}

Original file line number Diff line number Diff line change
Expand Up @@ -50,10 +50,6 @@ import org.apache.spark.sql.types._
class HiveContext(sc: SparkContext) extends SQLContext(sc) {
self =>

// Session-independent configuration override: defaults the SQL dialect to
// "hiveql" (instead of the base SQLContext default) unless the user has set
// spark.sql.dialect explicitly.
// NOTE(review): this commit removes this override from HiveContext itself;
// the equivalent (plus a CASE_SENSITIVE setting) now lives on the
// per-session SQLSession shown further down in this diff.
protected[sql] override lazy val conf: SQLConf = new SQLConf {
// Fall back to the HiveQL dialect when SQLConf.DIALECT is unset.
override def dialect: String = getConf(SQLConf.DIALECT, "hiveql")
}

/**
* When true, enables an experimental feature where metastore tables that use the parquet SerDe
* are automatically converted to use the Spark SQL parquet table scan, instead of the Hive
Expand Down Expand Up @@ -266,6 +262,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
protected[hive] class SQLSession extends super.SQLSession {
// Per-session configuration for Hive-backed sessions: defaults the dialect
// to "hiveql" and disables case-sensitive analysis.
protected[sql] override lazy val conf: SQLConf = new SQLConf {
// Fall back to the HiveQL dialect when SQLConf.DIALECT is unset.
override def dialect: String = getConf(SQLConf.DIALECT, "hiveql")
// Disable case-sensitive resolution for this session (per the commit
// title, "set CASE_SENSITIVE false in hivecontext") — presumably to match
// Hive's case-insensitive identifier handling; confirm against
// CatalystConf's analyzer usage.
setConf(CatalystConf.CASE_SENSITIVE, "false")
}

protected[hive] lazy val hiveconf: HiveConf = {
Expand Down

0 comments on commit 966e719

Please sign in to comment.