Merge pull request apache#8 from apache/master
merge latest spark
pzzs committed Apr 8, 2015
2 parents 802261c + 15e0d2b commit 34b1a9a
Showing 318 changed files with 8,239 additions and 3,337 deletions.
59 changes: 59 additions & 0 deletions bin/load-spark-env.cmd
@@ -0,0 +1,59 @@
@echo off

rem
rem Licensed to the Apache Software Foundation (ASF) under one or more
rem contributor license agreements. See the NOTICE file distributed with
rem this work for additional information regarding copyright ownership.
rem The ASF licenses this file to You under the Apache License, Version 2.0
rem (the "License"); you may not use this file except in compliance with
rem the License. You may obtain a copy of the License at
rem
rem http://www.apache.org/licenses/LICENSE-2.0
rem
rem Unless required by applicable law or agreed to in writing, software
rem distributed under the License is distributed on an "AS IS" BASIS,
rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
rem See the License for the specific language governing permissions and
rem limitations under the License.
rem

rem This script loads spark-env.cmd if it exists, and ensures it is only loaded once.
rem spark-env.cmd is loaded from SPARK_CONF_DIR if set, or within the current directory's
rem conf/ subdirectory.

if [%SPARK_ENV_LOADED%] == [] (
  set SPARK_ENV_LOADED=1

  if not [%SPARK_CONF_DIR%] == [] (
    set user_conf_dir=%SPARK_CONF_DIR%
  ) else (
    set user_conf_dir=%~dp0..\..\conf
  )

  call :LoadSparkEnv
)

rem Setting SPARK_SCALA_VERSION if not already set.

set ASSEMBLY_DIR2=%SPARK_HOME%/assembly/target/scala-2.11
set ASSEMBLY_DIR1=%SPARK_HOME%/assembly/target/scala-2.10

if [%SPARK_SCALA_VERSION%] == [] (

  if exist %ASSEMBLY_DIR2% if exist %ASSEMBLY_DIR1% (
    echo "Presence of build for both scala versions(SCALA 2.10 and SCALA 2.11) detected."
    echo "Either clean one of them or, set SPARK_SCALA_VERSION=2.11 in spark-env.cmd."
    exit 1
  )
  if exist %ASSEMBLY_DIR2% (
    set SPARK_SCALA_VERSION=2.11
  ) else (
    set SPARK_SCALA_VERSION=2.10
  )
)
exit /b 0

:LoadSparkEnv
if exist "%user_conf_dir%\spark-env.cmd" (
call "%user_conf_dir%\spark-env.cmd"
)
3 changes: 1 addition & 2 deletions bin/pyspark2.cmd
@@ -20,8 +20,7 @@ rem
rem Figure out where the Spark framework is installed
set SPARK_HOME=%~dp0..

rem Load environment variables from conf\spark-env.cmd, if it exists
if exist "%SPARK_HOME%\conf\spark-env.cmd" call "%SPARK_HOME%\conf\spark-env.cmd"
call %SPARK_HOME%\bin\load-spark-env.cmd

rem Figure out which Python to use.
if "x%PYSPARK_DRIVER_PYTHON%"=="x" (
3 changes: 1 addition & 2 deletions bin/run-example2.cmd
@@ -25,8 +25,7 @@ set FWDIR=%~dp0..\
rem Export this as SPARK_HOME
set SPARK_HOME=%FWDIR%

rem Load environment variables from conf\spark-env.cmd, if it exists
if exist "%FWDIR%conf\spark-env.cmd" call "%FWDIR%conf\spark-env.cmd"
call %SPARK_HOME%\bin\load-spark-env.cmd

rem Test that an argument was given
if not "x%1"=="x" goto arg_given
61 changes: 36 additions & 25 deletions bin/spark-class
@@ -40,35 +40,46 @@ else
fi
fi

# Look for the launcher. In non-release mode, add the compiled classes directly to the classpath
# instead of looking for a jar file.
SPARK_LAUNCHER_CP=
if [ -f $SPARK_HOME/RELEASE ]; then
LAUNCHER_DIR="$SPARK_HOME/lib"
num_jars="$(ls -1 "$LAUNCHER_DIR" | grep "^spark-launcher.*\.jar$" | wc -l)"
if [ "$num_jars" -eq "0" -a -z "$SPARK_LAUNCHER_CP" ]; then
echo "Failed to find Spark launcher in $LAUNCHER_DIR." 1>&2
echo "You need to build Spark before running this program." 1>&2
exit 1
fi
# Find assembly jar
SPARK_ASSEMBLY_JAR=
if [ -f "$SPARK_HOME/RELEASE" ]; then
ASSEMBLY_DIR="$SPARK_HOME/lib"
else
ASSEMBLY_DIR="$SPARK_HOME/assembly/target/scala-$SPARK_SCALA_VERSION"
fi

LAUNCHER_JARS="$(ls -1 "$LAUNCHER_DIR" | grep "^spark-launcher.*\.jar$" || true)"
if [ "$num_jars" -gt "1" ]; then
echo "Found multiple Spark launcher jars in $LAUNCHER_DIR:" 1>&2
echo "$LAUNCHER_JARS" 1>&2
echo "Please remove all but one jar." 1>&2
exit 1
fi
num_jars="$(ls -1 "$ASSEMBLY_DIR" | grep "^spark-assembly.*hadoop.*\.jar$" | wc -l)"
if [ "$num_jars" -eq "0" -a -z "$SPARK_ASSEMBLY_JAR" ]; then
echo "Failed to find Spark assembly in $ASSEMBLY_DIR." 1>&2
echo "You need to build Spark before running this program." 1>&2
exit 1
fi
ASSEMBLY_JARS="$(ls -1 "$ASSEMBLY_DIR" | grep "^spark-assembly.*hadoop.*\.jar$" || true)"
if [ "$num_jars" -gt "1" ]; then
echo "Found multiple Spark assembly jars in $ASSEMBLY_DIR:" 1>&2
echo "$ASSEMBLY_JARS" 1>&2
echo "Please remove all but one jar." 1>&2
exit 1
fi

SPARK_LAUNCHER_CP="${LAUNCHER_DIR}/${LAUNCHER_JARS}"
SPARK_ASSEMBLY_JAR="${ASSEMBLY_DIR}/${ASSEMBLY_JARS}"

# Verify that versions of java used to build the jars and run Spark are compatible
if [ -n "$JAVA_HOME" ]; then
JAR_CMD="$JAVA_HOME/bin/jar"
else
LAUNCHER_DIR="$SPARK_HOME/launcher/target/scala-$SPARK_SCALA_VERSION"
if [ ! -d "$LAUNCHER_DIR/classes" ]; then
echo "Failed to find Spark launcher classes in $LAUNCHER_DIR." 1>&2
echo "You need to build Spark before running this program." 1>&2
JAR_CMD="jar"
fi

if [ $(command -v "$JAR_CMD") ] ; then
jar_error_check=$("$JAR_CMD" -tf "$SPARK_ASSEMBLY_JAR" nonexistent/class/path 2>&1)
if [[ "$jar_error_check" =~ "invalid CEN header" ]]; then
echo "Loading Spark jar with '$JAR_CMD' failed. " 1>&2
echo "This is likely because Spark was compiled with Java 7 and run " 1>&2
echo "with Java 6. (see SPARK-1703). Please use Java 7 to run Spark " 1>&2
echo "or build Spark with Java 6." 1>&2
exit 1
fi
SPARK_LAUNCHER_CP="$LAUNCHER_DIR/classes"
fi

# The launcher library will print arguments separated by a NULL character, to allow arguments with
@@ -77,7 +88,7 @@ fi
CMD=()
while IFS= read -d '' -r ARG; do
CMD+=("$ARG")
done < <("$RUNNER" -cp "$SPARK_LAUNCHER_CP" org.apache.spark.launcher.Main "$@")
done < <("$RUNNER" -cp "$SPARK_ASSEMBLY_JAR" org.apache.spark.launcher.Main "$@")

if [ "${CMD[0]}" = "usage" ]; then
"${CMD[@]}"
36 changes: 12 additions & 24 deletions bin/spark-class2.cmd
@@ -20,40 +20,28 @@ rem
rem Figure out where the Spark framework is installed
set SPARK_HOME=%~dp0..

rem Load environment variables from conf\spark-env.cmd, if it exists
if exist "%SPARK_HOME%\conf\spark-env.cmd" call "%SPARK_HOME%\conf\spark-env.cmd"
call %SPARK_HOME%\bin\load-spark-env.cmd

rem Test that an argument was given
if "x%1"=="x" (
echo Usage: spark-class ^<class^> [^<args^>]
exit /b 1
)

set LAUNCHER_CP=0
if exist %SPARK_HOME%\RELEASE goto find_release_launcher
rem Find assembly jar
set SPARK_ASSEMBLY_JAR=0

rem Look for the Spark launcher in both Scala build directories. The launcher doesn't use Scala so
rem it doesn't really matter which one is picked up. Add the compiled classes directly to the
rem classpath instead of looking for a jar file, since it's very common for people using sbt to use
rem the "assembly" target instead of "package".
set LAUNCHER_CLASSES=%SPARK_HOME%\launcher\target\scala-2.10\classes
if exist %LAUNCHER_CLASSES% (
set LAUNCHER_CP=%LAUNCHER_CLASSES%
if exist "%SPARK_HOME%\RELEASE" (
set ASSEMBLY_DIR=%SPARK_HOME%\lib
) else (
set ASSEMBLY_DIR=%SPARK_HOME%\assembly\target\scala-%SPARK_SCALA_VERSION%
)
set LAUNCHER_CLASSES=%SPARK_HOME%\launcher\target\scala-2.11\classes
if exist %LAUNCHER_CLASSES% (
set LAUNCHER_CP=%LAUNCHER_CLASSES%
)
goto check_launcher

:find_release_launcher
for %%d in (%SPARK_HOME%\lib\spark-launcher*.jar) do (
set LAUNCHER_CP=%%d
for %%d in (%ASSEMBLY_DIR%\spark-assembly*hadoop*.jar) do (
set SPARK_ASSEMBLY_JAR=%%d
)

:check_launcher
if "%LAUNCHER_CP%"=="0" (
echo Failed to find Spark launcher JAR.
if "%SPARK_ASSEMBLY_JAR%"=="0" (
echo Failed to find Spark assembly JAR.
echo You need to build Spark before running this program.
exit /b 1
)
@@ -64,7 +52,7 @@ if not "x%JAVA_HOME%"=="x" set RUNNER=%JAVA_HOME%\bin\java

rem The launcher library prints the command to be executed in a single line suitable for being
rem executed by the batch interpreter. So read all the output of the launcher into a variable.
for /f "tokens=*" %%i in ('cmd /C ""%RUNNER%" -cp %LAUNCHER_CP% org.apache.spark.launcher.Main %*"') do (
for /f "tokens=*" %%i in ('cmd /C ""%RUNNER%" -cp %SPARK_ASSEMBLY_JAR% org.apache.spark.launcher.Main %*"') do (
set SPARK_CMD=%%i
)
%SPARK_CMD%
@@ -30,7 +30,7 @@ $(function() {

stripeSummaryTable();

$("input:checkbox").click(function() {
$('input[type="checkbox"]').click(function() {
var column = "table ." + $(this).attr("name");
$(column).toggle();
stripeSummaryTable();
@@ -39,15 +39,15 @@ $(function() {
$("#select-all-metrics").click(function() {
if (this.checked) {
// Toggle all un-checked options.
$('input:checkbox:not(:checked)').trigger('click');
$('input[type="checkbox"]:not(:checked)').trigger('click');
} else {
// Toggle all checked options.
$('input:checkbox:checked').trigger('click');
$('input[type="checkbox"]:checked').trigger('click');
}
});

// Trigger a click on the checkbox if a user clicks the label next to it.
$("span.additional-metric-title").click(function() {
$(this).parent().find('input:checkbox').trigger('click');
$(this).parent().find('input[type="checkbox"]').trigger('click');
});
});
@@ -17,10 +17,12 @@

package org.apache.spark

import java.util.concurrent.{Executors, TimeUnit}

import scala.collection.mutable

import org.apache.spark.scheduler._
import org.apache.spark.util.{SystemClock, Clock}
import org.apache.spark.util.{Clock, SystemClock, Utils}

/**
* An agent that dynamically allocates and removes executors based on the workload.
@@ -129,6 +131,10 @@ private[spark] class ExecutorAllocationManager(
// Listener for Spark events that impact the allocation policy
private val listener = new ExecutorAllocationListener

// Executor that handles the scheduling task.
private val executor = Executors.newSingleThreadScheduledExecutor(
Utils.namedThreadFactory("spark-dynamic-executor-allocation"))

/**
* Verify that the settings specified through the config are valid.
* If not, throw an appropriate exception.
@@ -173,32 +179,24 @@ private[spark] class ExecutorAllocationManager(
}

/**
* Register for scheduler callbacks to decide when to add and remove executors.
* Register for scheduler callbacks to decide when to add and remove executors, and start
* the scheduling task.
*/
def start(): Unit = {
listenerBus.addListener(listener)
startPolling()

val scheduleTask = new Runnable() {
override def run(): Unit = Utils.logUncaughtExceptions(schedule())
}
executor.scheduleAtFixedRate(scheduleTask, 0, intervalMillis, TimeUnit.MILLISECONDS)
}

/**
* Start the main polling thread that keeps track of when to add and remove executors.
* Stop the allocation manager.
*/
private def startPolling(): Unit = {
val t = new Thread {
override def run(): Unit = {
while (true) {
try {
schedule()
} catch {
case e: Exception => logError("Exception in dynamic executor allocation thread!", e)
}
Thread.sleep(intervalMillis)
}
}
}
t.setName("spark-dynamic-executor-allocation")
t.setDaemon(true)
t.start()
def stop(): Unit = {
executor.shutdown()
executor.awaitTermination(10, TimeUnit.SECONDS)
}

/**
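The hunk above replaces the hand-rolled polling thread (a while (true) loop with Thread.sleep) with a single-threaded ScheduledExecutorService, which also gives the manager an orderly stop() via shutdown() and awaitTermination(). Below is a minimal, self-contained Scala sketch of that pattern, independent of Spark; the object and method names (AllocationPollerSketch, doWork) and the intervals are illustrative only, and the inline thread factory merely approximates what Utils.namedThreadFactory provides.

import java.util.concurrent.{Executors, ThreadFactory, TimeUnit}

// Sketch: a named, daemon, single-threaded scheduled executor in place of an
// explicit while(true)/Thread.sleep polling loop.
object AllocationPollerSketch {

  // Roughly what a named daemon thread factory provides: a readable thread name,
  // and daemon status so the poller never keeps the JVM alive on its own.
  private val threadFactory = new ThreadFactory {
    override def newThread(r: Runnable): Thread = {
      val t = new Thread(r, "sketch-dynamic-executor-allocation")
      t.setDaemon(true)
      t
    }
  }

  private val executor = Executors.newSingleThreadScheduledExecutor(threadFactory)

  def start(intervalMillis: Long): Unit = {
    val task = new Runnable {
      // Catch exceptions so a single failure does not cancel the fixed-rate schedule.
      override def run(): Unit =
        try doWork() catch { case e: Exception => e.printStackTrace() }
    }
    executor.scheduleAtFixedRate(task, 0L, intervalMillis, TimeUnit.MILLISECONDS)
  }

  def stop(): Unit = {
    executor.shutdown()
    executor.awaitTermination(10L, TimeUnit.SECONDS)
  }

  // Stand-in for the real schedule() logic.
  private def doWork(): Unit = println("polling...")

  def main(args: Array[String]): Unit = {
    start(intervalMillis = 100L)
    Thread.sleep(500L)
    stop()
  }
}

Compared with the removed startPolling thread, the executor-based version can be shut down within a bounded wait, which is what the new stop() method relies on.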
