make caseSensitive configurable
jackylk committed Dec 2, 2014
1 parent 6dfe38a commit 578d167
Showing 2 changed files with 9 additions and 1 deletion.
8 changes: 8 additions & 0 deletions sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
@@ -46,6 +46,8 @@ private[spark] object SQLConf {
   // This is only used for the thriftserver
   val THRIFTSERVER_POOL = "spark.sql.thriftserver.scheduler.pool"
 
+  val CASE_SENSITIVE = "spark.sql.caseSensitive"
+
   object Deprecated {
     val MAPRED_REDUCE_TASKS = "mapred.reduce.tasks"
   }
@@ -148,6 +150,12 @@ private[sql] trait SQLConf {
   private[spark] def columnNameOfCorruptRecord: String =
     getConf(COLUMN_NAME_OF_CORRUPT_RECORD, "_corrupt_record")
 
+  /**
+   * When set to true, analyzer is case sensitive
+   */
+  private[spark] def caseSensitive: Boolean =
+    getConf(CASE_SENSITIVE, "true").toBoolean
+
   /** ********************** SQLConf functionality methods ************ */
 
   /** Set Spark SQL configuration properties. */
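For reference, a minimal usage sketch of the new key with this change applied (not part of this commit; the local-mode SparkContext setup and app name are assumptions for illustration):

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

// A throwaway local context, purely for illustration.
val sc = new SparkContext(new SparkConf().setMaster("local").setAppName("case-sensitivity-demo"))
val sqlContext = new SQLContext(sc)

// The analyzer stays case sensitive by default ("true"); flip it off explicitly.
sqlContext.setConf("spark.sql.caseSensitive", "false")

// caseSensitive parses the stored string with .toBoolean, so the value must be
// "true" or "false"; anything else throws when the flag is read.
println(sqlContext.getConf("spark.sql.caseSensitive"))  // "false"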
2 changes: 1 addition & 1 deletion sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -65,7 +65,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
 
   @transient
   protected[sql] lazy val analyzer: Analyzer =
-    new Analyzer(catalog, functionRegistry, caseSensitive = true)
+    new Analyzer(catalog, functionRegistry, caseSensitive)
 
   @transient
   protected[sql] lazy val optimizer: Optimizer = DefaultOptimizer
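A rough sketch of what wiring the flag into the analyzer enables (the Person case class, the "people" temp table, and the surrounding setup are assumptions, not part of this diff):

case class Person(Name: String, Age: Int)

// Assumes `sc` and `sqlContext` from the sketch above, plus the usual
// createSchemaRDD implicits for turning an RDD of case classes into a SchemaRDD.
import sqlContext.createSchemaRDD
val people = sc.parallelize(Seq(Person("Alice", 30), Person("Bob", 25)))
people.registerTempTable("people")

// Because `analyzer` is a lazy val, the setting is captured when the Analyzer is
// first constructed, so set it before the first query is analyzed.
sqlContext.setConf("spark.sql.caseSensitive", "false")

// Resolves even though the attribute is declared as "Name": the analyzer now
// matches identifiers case-insensitively.
sqlContext.sql("SELECT name FROM people").collect()

// Under the default ("true"), the same query would fail to resolve "name".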
