
Commit

fix final tests?
marmbrus committed May 6, 2015
1 parent 1c50813 commit 1d8ae44
Showing 4 changed files with 33 additions and 9 deletions.
project/SparkBuild.scala: 3 additions & 0 deletions
@@ -205,6 +205,9 @@ object SparkBuild extends PomBuild {
sparkShell := {
(runMain in Compile).toTask(" org.apache.spark.repl.Main -usejavacp").value
},

javaOptions in Compile += "-Dspark.master=local",

sparkSql := {
(runMain in Compile).toTask(" org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver").value
}
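The new `javaOptions in Compile += "-Dspark.master=local"` setting passes a JVM system property to sbt's forked run tasks (such as sparkShell and sparkSql above), so they default to a local master. Below is a minimal sketch, not from this commit, of how such a `-D` option surfaces inside the launched application; the object name and fallback value are assumptions.

```scala
// Sketch only: reading back a -D option added via sbt's javaOptions,
// assuming the run task forks a JVM.
object MasterFromSystemProps {
  def main(args: Array[String]): Unit = {
    // sys.props exposes JVM system properties, e.g. -Dspark.master=local
    val master = sys.props.getOrElse("spark.master", "local[*]")
    println(s"Would start Spark with master = $master")
  }
}
```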
@@ -215,9 +215,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
}

sparkContext.getConf.getAll.foreach {
-case (key, value) if key.startsWith("spark.sql") =>
-  println(s"$key=$value")
-  setConf(key, value)
+case (key, value) if key.startsWith("spark.sql") => setConf(key, value)
case _ =>
}

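The hunk above keeps only the conf propagation: every `spark.sql.*` entry in the SparkContext's configuration is copied into the SQLConf, and the debug println is dropped. A standalone, REPL-style sketch of the same filtering pattern follows, using plain Scala collections in place of SparkConf and SQLConf (both assumed, not reproduced).

```scala
// Sketch of the propagation pattern above with plain collections.
val sparkConfEntries = Seq(
  "spark.master" -> "local",
  "spark.sql.shuffle.partitions" -> "4",
  "spark.sql.dialect" -> "hiveql")

val sqlConf = scala.collection.mutable.Map.empty[String, String]

// Only spark.sql.* keys are copied; everything else is ignored.
sparkConfEntries.foreach {
  case (key, value) if key.startsWith("spark.sql") => sqlConf(key) = value
  case _ =>
}

assert(sqlConf.keySet == Set("spark.sql.shuffle.partitions", "spark.sql.dialect"))
```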
@@ -100,10 +100,14 @@ private[hive] object SparkSQLCLIDriver {

// Set all properties specified via command line.
val conf: HiveConf = sessionState.getConf
-sessionState.cmdProperties.entrySet().foreach { item: java.util.Map.Entry[Object, Object] =>
-  //conf.set(item.getKey.asInstanceOf[String], item.getValue.asInstanceOf[String])
-  //sessionState.getOverriddenConfigurations.put(
-  //  item.getKey.asInstanceOf[String], item.getValue.asInstanceOf[String])
+sessionState.cmdProperties.entrySet().foreach { item =>
+  val key = item.getKey.asInstanceOf[String]
+  val value = item.getValue.asInstanceOf[String]
+  // We do not propagate metastore options to the execution copy of Hive.
+  if (key != "javax.jdo.option.ConnectionURL") {
+    conf.set(key, value)
+    sessionState.getOverriddenConfigurations.put(key, value)
+  }
}

SessionState.start(sessionState)
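The rewritten loop above copies every property given on the CLI command line into the execution HiveConf and the session's overridden configurations, but deliberately skips `javax.jdo.option.ConnectionURL` so the command line cannot point the execution copy of Hive at a different metastore. A REPL-style sketch of that skip logic against a plain `java.util.Properties` follows; the example keys and the stand-in map are assumptions.

```scala
// Sketch of the filtering above; sessionState.cmdProperties is replaced by a
// plain java.util.Properties and HiveConf by a mutable Map.
import scala.collection.JavaConverters._

val cmdProperties = new java.util.Properties()
cmdProperties.put("hive.cli.print.header", "true")                          // example key (assumed)
cmdProperties.put("javax.jdo.option.ConnectionURL", "jdbc:derby:memory:db") // must not be copied

val applied = scala.collection.mutable.Map.empty[String, String]

cmdProperties.entrySet().asScala.foreach { item =>
  val key = item.getKey.asInstanceOf[String]
  val value = item.getValue.asInstanceOf[String]
  // Mirror the check in the diff: metastore connection settings stay untouched.
  if (key != "javax.jdo.option.ConnectionURL") {
    applied(key) = value
  }
}

assert(applied.keySet == Set("hive.cli.print.header"))
```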
@@ -290,15 +290,34 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
/** Overridden by child classes that need to set configuration before the client init. */
protected def configure(): Map[String, String] = Map.empty


protected[hive] class SQLSession extends super.SQLSession {
protected[sql] override lazy val conf: SQLConf = new SQLConf {
override def dialect: String = getConf(SQLConf.DIALECT, "hiveql")
}

-protected[hive] def localSession = executionHive.state

-protected[hive] def hiveconf = executionConf
+/**
+ * SQLConf and HiveConf contracts:
+ *
+ * 1. reuse existing started SessionState if any
+ * 2. when the Hive session is first initialized, params in HiveConf will get picked up by the
+ *    SQLConf. Additionally, any properties set by set() or a SET command inside sql() will be
+ *    set in the SQLConf *as well as* in the HiveConf.
+ */
+protected[hive] lazy val sessionState: SessionState = {
+  var state = SessionState.get()
+  if (state == null) {
+    state = new SessionState(new HiveConf(classOf[SessionState]))
+    SessionState.start(state)
+  }
+  state
+}
+
+protected[hive] lazy val hiveconf: HiveConf = {
+  setConf(sessionState.getConf.getAllProperties)
+  sessionState.getConf
+}
}

override protected[sql] def dialectClassName = if (conf.dialect == "hiveql") {
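The restored comment block and lazy vals above spell out the contract between SQLConf and HiveConf: reuse a SessionState that has already been started, seed the SQLConf from the HiveConf on first use, and route later SET commands to both. Below is a hypothetical usage sketch of that contract from the caller's side; the SparkContext setup and the property key are illustrative, not code from this commit.

```scala
// Hypothetical usage, not code from the commit.
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.HiveContext

val sc = new SparkContext(new SparkConf().setMaster("local").setAppName("hiveconf-demo"))
val hiveContext = new HiveContext(sc)

// Per the contract above, a SET issued through sql() lands in the SQLConf as
// well as in the session HiveConf (the latter is only visible inside the
// org.apache.spark.sql.hive package, so only the SQLConf side is read here).
hiveContext.sql("SET spark.sql.shuffle.partitions=4")
println(hiveContext.getConf("spark.sql.shuffle.partitions"))  // expected: 4
```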
