Resorts to SparkContext.version to inspect Spark version
liancheng committed Nov 7, 2014
1 parent cb0eae3 commit d9585e1
Showing 2 changed files with 12 additions and 24 deletions.
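
The gist of the change: the version reported by the SQL Thrift server used to come from Utils.sparkVersion, which located the jar containing Utils and read Implementation-Version from its manifest; that lookup falls back to "Unknown" whenever the class is not loaded from a plain jar, and otherwise depends on how the artifact was packaged. SparkContext.version sidesteps the packaging question entirely. A minimal sketch of both lookups (the object name, master, and app name below are illustrative, not from the commit):

import java.net.URL
import java.util.jar.Attributes.Name
import java.util.jar.{Manifest => JarManifest}

import org.apache.spark.{SparkConf, SparkContext}

object SparkVersionLookup {
  // The approach removed by this commit: find the jar an object was loaded
  // from and read Implementation-Version from its manifest. When the class
  // does not come from a jar (dev builds, tests), jarOfObject returns None
  // and the reported version degrades to "Unknown".
  def manifestVersion(obj: AnyRef): String =
    SparkContext.jarOfObject(obj).map { path =>
      val manifestUrl = new URL(s"jar:file:$path!/META-INF/MANIFEST.MF")
      val manifest = new JarManifest(manifestUrl.openStream())
      manifest.getMainAttributes.getValue(Name.IMPLEMENTATION_VERSION)
    }.getOrElse("Unknown")

  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setMaster("local").setAppName("version-check"))
    // The approach adopted here: SparkContext knows its own version,
    // regardless of how the classes were packaged.
    println(sc.version)  // e.g. "1.2.0-SNAPSHOT"
    sc.stop()
  }
}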
24 changes: 7 additions & 17 deletions core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -21,10 +21,8 @@ import java.io._
 import java.lang.management.ManagementFactory
 import java.net._
 import java.nio.ByteBuffer
-import java.util.jar.Attributes.Name
-import java.util.{Properties, Locale, Random, UUID}
-import java.util.concurrent.{ThreadFactory, ConcurrentHashMap, Executors, ThreadPoolExecutor}
-import java.util.jar.{Manifest => JarManifest}
+import java.util.concurrent.{ConcurrentHashMap, Executors, ThreadFactory, ThreadPoolExecutor}
+import java.util.{Locale, Properties, Random, UUID}
 
 import scala.collection.JavaConversions._
 import scala.collection.Map
@@ -38,11 +36,11 @@ import com.google.common.io.{ByteStreams, Files}
 import com.google.common.util.concurrent.ThreadFactoryBuilder
 import org.apache.commons.lang3.SystemUtils
 import org.apache.hadoop.conf.Configuration
-import org.apache.log4j.PropertyConfigurator
 import org.apache.hadoop.fs.{FileSystem, FileUtil, Path}
+import org.apache.log4j.PropertyConfigurator
 import org.eclipse.jetty.util.MultiException
 import org.json4s._
-import tachyon.client.{TachyonFile,TachyonFS}
+import tachyon.client.{TachyonFS, TachyonFile}
 
 import org.apache.spark._
 import org.apache.spark.deploy.SparkHadoopUtil
@@ -352,8 +350,8 @@ private[spark] object Utils extends Logging {
    * Download a file to target directory. Supports fetching the file in a variety of ways,
    * including HTTP, HDFS and files on a standard filesystem, based on the URL parameter.
    *
-   * If `useCache` is true, first attempts to fetch the file to a local cache that's shared
-   * across executors running the same application. `useCache` is used mainly for
+   * If `useCache` is true, first attempts to fetch the file to a local cache that's shared
+   * across executors running the same application. `useCache` is used mainly for
    * the executors, and not in local mode.
    *
    * Throws SparkException if the target file already exists and has different contents than
@@ -400,7 +398,7 @@
     } else {
       doFetchFile(url, targetDir, fileName, conf, securityMgr, hadoopConf)
     }
-
+
     // Decompress the file if it's a .tar or .tar.gz
     if (fileName.endsWith(".tar.gz") || fileName.endsWith(".tgz")) {
       logInfo("Untarring " + fileName)
@@ -1775,13 +1773,6 @@ private[spark] object Utils extends Logging {
     s"$libraryPathEnvName=$libraryPath$ampersand"
   }
 
-  lazy val sparkVersion =
-    SparkContext.jarOfObject(this).map { path =>
-      val manifestUrl = new URL(s"jar:file:$path!/META-INF/MANIFEST.MF")
-      val manifest = new JarManifest(manifestUrl.openStream())
-      manifest.getMainAttributes.getValue(Name.IMPLEMENTATION_VERSION)
-    }.getOrElse("Unknown")
-
   /**
    * Return the value of a config either through the SparkConf or the Hadoop configuration
    * if this is Yarn mode. In the latter case, this defaults to the value set through SparkConf
@@ -1795,7 +1786,6 @@
       sparkValue
     }
   }
-
 }
 
 /**
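An aside on the fetchFile doc comment touched above: with useCache enabled, executors of one application share a locally cached copy instead of each downloading the file. A hedged call-site sketch; the parameter list shown is an assumption about this era's private[spark] API, not a verified signature:

package org.apache.spark  // Utils is private[spark], so a caller must live here

import java.io.File

import org.apache.hadoop.conf.Configuration

import org.apache.spark.util.Utils

object FetchFileSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    // Assumed parameters: url, targetDir, conf, securityMgr, hadoopConf,
    // timestamp, useCache. useCache = true routes the download through the
    // per-application cache shared by executors on the same node.
    Utils.fetchFile(
      "hdfs:///deps/some-lib.jar",  // HTTP, HDFS, or local file URLs
      new File("/tmp/app-work-dir"),
      conf,
      new SecurityManager(conf),
      new Configuration(),
      System.currentTimeMillis(),
      useCache = true)
  }
}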
12 changes: 5 additions & 7 deletions sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIService.scala
@@ -17,18 +17,16 @@
 
 package org.apache.spark.sql.hive.thriftserver
 
-import java.util.jar.Attributes.Name
-
-import scala.collection.JavaConversions._
-
 import java.io.IOException
 import java.util.{List => JList}
 import javax.security.auth.login.LoginException
 
+import scala.collection.JavaConversions._
+
 import org.apache.commons.logging.Log
-import org.apache.hadoop.security.UserGroupInformation
 import org.apache.hadoop.hive.conf.HiveConf
 import org.apache.hadoop.hive.shims.ShimLoader
+import org.apache.hadoop.security.UserGroupInformation
 import org.apache.hive.service.Service.STATE
 import org.apache.hive.service.auth.HiveAuthFactory
 import org.apache.hive.service.cli._
@@ -50,7 +48,7 @@ private[hive] class SparkSQLCLIService(hiveContext: HiveContext)
     addService(sparkSqlSessionManager)
     var sparkServiceUGI: UserGroupInformation = null
 
-    if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
+    if (ShimLoader.getHadoopShims.isSecurityEnabled) {
       try {
         HiveAuthFactory.loginFromKeytab(hiveConf)
         sparkServiceUGI = ShimLoader.getHadoopShims.getUGIForConf(hiveConf)
@@ -68,7 +66,7 @@ private[hive] class SparkSQLCLIService(hiveContext: HiveContext)
     getInfoType match {
       case GetInfoType.CLI_SERVER_NAME => new GetInfoValue("Spark SQL")
       case GetInfoType.CLI_DBMS_NAME => new GetInfoValue("Spark SQL")
-      case GetInfoType.CLI_DBMS_VER => new GetInfoValue(Utils.sparkVersion)
+      case GetInfoType.CLI_DBMS_VER => new GetInfoValue(hiveContext.sparkContext.version)
       case _ => super.getInfo(sessionHandle, getInfoType)
     }
   }
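
The user-visible payoff shows up through JDBC: the Hive driver answers DatabaseMetaData calls with GetInfo requests against the Thrift server, so CLI_DBMS_VER now reports the real Spark version instead of the manifest-derived "Unknown". A hedged sketch, assuming a Spark SQL Thrift server on the default port and that this era's driver maps getDatabaseProductVersion to CLI_DBMS_VER:

import java.sql.DriverManager

object ThriftServerVersionCheck {
  def main(args: Array[String]): Unit = {
    Class.forName("org.apache.hive.jdbc.HiveDriver")
    val conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default")
    try {
      val meta = conn.getMetaData
      // Served by SparkSQLCLIService.getInfo via CLI_DBMS_NAME / CLI_DBMS_VER.
      println(s"${meta.getDatabaseProductName} ${meta.getDatabaseProductVersion}")
      // Before this commit a dev build could print "Spark SQL Unknown";
      // after it, e.g. "Spark SQL 1.2.0-SNAPSHOT".
    } finally {
      conn.close()
    }
  }
}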
