From 02e09ed59508bada5587a36cb0db0b07ae700ac2 Mon Sep 17 00:00:00 2001
From: pgandhi
Date: Mon, 26 Jun 2017 09:31:10 -0500
Subject: [PATCH 1/2] Commit to pgandhi

---
 .../history/ApplicationHistoryProvider.scala |  3 ++-
 .../deploy/history/FsHistoryProvider.scala   | 19 ++++++++++++++-----
 2 files changed, 16 insertions(+), 6 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/history/ApplicationHistoryProvider.scala b/core/src/main/scala/org/apache/spark/deploy/history/ApplicationHistoryProvider.scala
index 6d8758a3d3b1d..5cb48ca3e60b0 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/ApplicationHistoryProvider.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/ApplicationHistoryProvider.scala
@@ -30,7 +30,8 @@ private[spark] case class ApplicationAttemptInfo(
     endTime: Long,
     lastUpdated: Long,
     sparkUser: String,
-    completed: Boolean = false)
+    completed: Boolean = false,
+    appSparkVersion: String)
 
 private[spark] case class ApplicationHistoryInfo(
     id: String,
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala b/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
index cd241d6d22745..23c3686d67606 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala
@@ -248,7 +248,8 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
             val conf = this.conf.clone()
             val appSecManager = new SecurityManager(conf)
             SparkUI.createHistoryUI(conf, replayBus, appSecManager, appInfo.name,
-              HistoryServer.getAttemptURI(appId, attempt.attemptId), attempt.startTime)
+              HistoryServer.getAttemptURI(appId, attempt.attemptId),
+              attempt.startTime)
             // Do not call ui.bind() to avoid creating a new server for each application
           }
 
@@ -257,7 +258,10 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
           val appListener = replay(fileStatus, isApplicationCompleted(fileStatus), replayBus)
 
           if (appListener.appId.isDefined) {
+
             ui.getSecurityManager.setAcls(HISTORY_UI_ACLS_ENABLE)
+            ui.appSparkVersion = appListener.appSparkVersion.getOrElse("")
+
             // make sure to set admin acls before view acls so they are properly picked up
             val adminAcls = HISTORY_UI_ADMIN_ACLS + "," + appListener.adminAcls.getOrElse("")
             ui.getSecurityManager.setAdminAcls(adminAcls)
@@ -450,7 +454,8 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
     val newAttempts = try {
       val eventsFilter: ReplayEventsFilter = { eventString =>
         eventString.startsWith(APPL_START_EVENT_PREFIX) ||
-          eventString.startsWith(APPL_END_EVENT_PREFIX)
+          eventString.startsWith(APPL_END_EVENT_PREFIX) ||
+          eventString.startsWith(LOG_START_EVENT_PREFIX)
       }
 
       val logPath = fileStatus.getPath()
@@ -476,7 +481,8 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
           lastUpdated,
           appListener.sparkUser.getOrElse(NOT_STARTED),
          appCompleted,
-          fileStatus.getLen()
+          fileStatus.getLen(),
+          appListener.appSparkVersion.getOrElse("")
         )
         fileToAppInfo(logPath) = attemptInfo
         logDebug(s"Application log ${attemptInfo.logPath} loaded successfully: $attemptInfo")
@@ -742,6 +748,8 @@ private[history] object FsHistoryProvider {
   private val APPL_START_EVENT_PREFIX = "{\"Event\":\"SparkListenerApplicationStart\""
 
   private val APPL_END_EVENT_PREFIX = "{\"Event\":\"SparkListenerApplicationEnd\""
+
+  private val LOG_START_EVENT_PREFIX = "{\"Event\":\"SparkListenerLogStart\""
 }
 
 /**
@@ -769,9 +777,10 @@ private class FsApplicationAttemptInfo(
     lastUpdated: Long,
     sparkUser: String,
     completed: Boolean,
-    val fileSize: Long)
+    val fileSize: Long,
+    appSparkVersion: String)
   extends ApplicationAttemptInfo(
-    attemptId, startTime, endTime, lastUpdated, sparkUser, completed) {
+    attemptId, startTime, endTime, lastUpdated, sparkUser, completed, appSparkVersion) {
 
   /** extend the superclass string value with the extra attributes of this class */
   override def toString: String = {

From 0c36e226419f95760b85ed4d4ef7d6ad182f1bcf Mon Sep 17 00:00:00 2001
From: pgandhi
Date: Thu, 29 Jun 2017 14:18:50 -0500
Subject: [PATCH 2/2] Jira YSPARK-713 fix

---
 ypackage/conf/spark-env-gen.sh | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/ypackage/conf/spark-env-gen.sh b/ypackage/conf/spark-env-gen.sh
index 426d239cabd20..0fd5dd282727a 100644
--- a/ypackage/conf/spark-env-gen.sh
+++ b/ypackage/conf/spark-env-gen.sh
@@ -11,8 +11,14 @@ color=$(echo $cluster | cut -d . -f 2)
 spark_kerberos_keytab="/etc/grid-keytabs/${grid}${color}-jt1.prod.service.keytab"
 spark_kerberos_principal="mapred/${grid}${color}-jt1.${color}.ygrid.yahoo.com@YGRID.YAHOO.COM"
 
-spark_history_ssl_keystore="/etc/ssl/certs/prod/_${color}_ygrid_yahoo_com/mapred.jks"
-spark_history_ssl_truststore="/etc/ssl/certs/prod/_${color}_ygrid_yahoo_com/mapred.jks"
+if [ -z ${color} ]
+  then
+    spark_history_ssl_keystore="/etc/ssl/certs/prod/_open_ygrid_yahoo_com/mapred.jks"
+    spark_history_ssl_truststore="/etc/ssl/certs/prod/_open_ygrid_yahoo_com/mapred.jks"
+else
+    spark_history_ssl_keystore="/etc/ssl/certs/prod/_${color}_ygrid_yahoo_com/mapred.jks"
+    spark_history_ssl_truststore="/etc/ssl/certs/prod/_${color}_ygrid_yahoo_com/mapred.jks"
+fi
 
 # if it doesn't exist assume QE setup
 if [ ! -e $spark_kerberos_keytab ]
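
Note (not part of the patch series, would be stripped by git am): a minimal standalone Scala sketch of why [PATCH 1/2] adds LOG_START_EVENT_PREFIX to the replay filter. The first record of an event log is a SparkListenerLogStart entry that carries the Spark version, and it has to pass the ReplayEventsFilter before appListener.appSparkVersion can be populated. The object name ReplayFilterSketch and the sample JSON lines are illustrative assumptions, not code from the patch.

// Self-contained sketch; only reuses the three prefixes defined in FsHistoryProvider.
object ReplayFilterSketch {
  private val APPL_START_EVENT_PREFIX = "{\"Event\":\"SparkListenerApplicationStart\""
  private val APPL_END_EVENT_PREFIX = "{\"Event\":\"SparkListenerApplicationEnd\""
  private val LOG_START_EVENT_PREFIX = "{\"Event\":\"SparkListenerLogStart\""

  // Mirrors the prefix check the patch installs as the ReplayEventsFilter.
  def accepts(eventString: String): Boolean =
    eventString.startsWith(APPL_START_EVENT_PREFIX) ||
      eventString.startsWith(APPL_END_EVENT_PREFIX) ||
      eventString.startsWith(LOG_START_EVENT_PREFIX)

  def main(args: Array[String]): Unit = {
    // Hypothetical sample lines shaped like event-log records; the real log-start
    // record also carries the Spark version string the History Server wants.
    val logStart = "{\"Event\":\"SparkListenerLogStart\",\"Spark Version\":\"2.1.0\"}"
    val taskEnd = "{\"Event\":\"SparkListenerTaskEnd\"}"
    println(accepts(logStart)) // true: the version-bearing record is now replayed
    println(accepts(taskEnd))  // false: unrelated events are still filtered out
  }
}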