[SPARK-29018][SQL][phase1] Add new Module sql/thriftserver based on Hive 2.3.6 (apache#29)

### What changes were proposed in this pull request?
Add new Module sql/thriftserver based on Hive 2.3.6.


### Why are the changes needed?
Add new Module sql/thriftserver based on Hive 2.3.6.


### Does this PR introduce any user-facing change?
NO


### How was this patch tested?
NO
wangyum authored Nov 4, 2019
1 parent 0473dc0 commit 0e4c3fe
Showing 227 changed files with 84,082 additions and 10 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/master.yml
@@ -31,7 +31,7 @@ jobs:
       run: |
         export MAVEN_OPTS="-Xmx2g -XX:ReservedCodeCacheSize=1g -Dorg.slf4j.simpleLogger.defaultLogLevel=WARN"
         export MAVEN_CLI_OPTS="--no-transfer-progress"
-        ./build/mvn $MAVEN_CLI_OPTS -DskipTests -Pyarn -Pmesos -Pkubernetes -Phive -Phive-thriftserver -P${{ matrix.hadoop }} -Phadoop-cloud -Djava.version=${{ matrix.java }} package
+        ./build/mvn $MAVEN_CLI_OPTS -DskipTests -Pyarn -Pmesos -Pkubernetes -Phive -Phive-thriftserver -Pspark-thriftserver -P${{ matrix.hadoop }} -Phadoop-cloud -Djava.version=${{ matrix.java }} package
   lint:
1 change: 1 addition & 0 deletions .gitignore
@@ -85,6 +85,7 @@ TempStatsStore/
 metastore/
 metastore_db/
 sql/hive-thriftserver/test_warehouses
+sql/service/test_warehouse
 warehouse/
 spark-warehouse/
2 changes: 1 addition & 1 deletion dev/create-release/release-build.sh
@@ -136,7 +136,7 @@ if [[ $SPARK_VERSION > "2.4" ]]; then
 fi

 # Hive-specific profiles for some builds
-HIVE_PROFILES="-Phive -Phive-thriftserver"
+HIVE_PROFILES="-Phive -Phive-thriftserver -Pspark-thriftserver"
 # Profiles for publishing snapshots and release to Maven Central
 PUBLISH_PROFILES="$BASE_PROFILES $HIVE_PROFILES -Pspark-ganglia-lgpl -Pkinesis-asl"
 # Profiles for building binary releases
2 changes: 1 addition & 1 deletion dev/lint-java
@@ -20,7 +20,7 @@
 SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )"
 SPARK_ROOT_DIR="$(dirname $SCRIPT_DIR)"

-ERRORS=$($SCRIPT_DIR/../build/mvn -Pkinesis-asl -Pmesos -Pkubernetes -Pyarn -Phive -Phive-thriftserver checkstyle:check | grep ERROR)
+ERRORS=$($SCRIPT_DIR/../build/mvn -Pkinesis-asl -Pmesos -Pkubernetes -Pyarn -Phive -Phive-thriftserver -Pspark-thriftserver checkstyle:check | grep ERROR)

 if test ! -z "$ERRORS"; then
     echo -e "Checkstyle checks failed at following occurrences:\n$ERRORS"
2 changes: 1 addition & 1 deletion dev/mima
@@ -24,7 +24,7 @@ set -e
 FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
 cd "$FWDIR"

-SPARK_PROFILES=${1:-"-Pmesos -Pkubernetes -Pyarn -Pspark-ganglia-lgpl -Pkinesis-asl -Phive-thriftserver -Phive"}
+SPARK_PROFILES=${1:-"-Pmesos -Pkubernetes -Pyarn -Pspark-ganglia-lgpl -Pkinesis-asl -Phive-thriftserver -Pspark-thriftserver -Phive"}
 TOOLS_CLASSPATH="$(build/sbt -DcopyDependencies=false "export tools/fullClasspath" | tail -n1)"
 OLD_DEPS_CLASSPATH="$(build/sbt -DcopyDependencies=false $SPARK_PROFILES "export oldDeps/fullClasspath" | tail -n1)"
2 changes: 1 addition & 1 deletion dev/run-tests.py
@@ -113,7 +113,7 @@ def determine_modules_to_test(changed_modules):
     >>> x = [x.name for x in determine_modules_to_test([modules.sql])]
     >>> x  # doctest: +NORMALIZE_WHITESPACE
     ['sql', 'avro', 'hive', 'mllib', 'sql-kafka-0-10', 'examples', 'hive-thriftserver',
-     'pyspark-sql', 'repl', 'sparkr', 'pyspark-mllib', 'pyspark-ml']
+     'service', 'pyspark-sql', 'repl', 'sparkr', 'pyspark-mllib', 'pyspark-ml']
     """
     modules_to_test = set()
     for module in changed_modules:
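One way to exercise the updated doctest locally; a hedged sketch, assuming the doctests in dev/run-tests.py import cleanly when invoked from the repository root:

 # Hypothetical check, not part of this PR: run the doctests in dev/run-tests.py.
 python -m doctest -v dev/run-tests.py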
2 changes: 1 addition & 1 deletion dev/sbt-checkstyle
@@ -17,7 +17,7 @@
 # limitations under the License.
 #

-SPARK_PROFILES=${1:-"-Pkinesis-asl -Pmesos -Pkubernetes -Pyarn -Phive -Phive-thriftserver"}
+SPARK_PROFILES=${1:-"-Pkinesis-asl -Pmesos -Pkubernetes -Pyarn -Phive -Phive-thriftserver -Pspark-thriftserver"}

 # NOTE: echo "q" is needed because SBT prompts the user for input on encountering a build file
 # with failure (either resolution or compilation); the "q" makes SBT quit.
2 changes: 1 addition & 1 deletion dev/scalastyle
@@ -17,7 +17,7 @@
 # limitations under the License.
 #

-SPARK_PROFILES=${1:-"-Pmesos -Pkubernetes -Pyarn -Pspark-ganglia-lgpl -Pkinesis-asl -Phive-thriftserver -Phive"}
+SPARK_PROFILES=${1:-"-Pmesos -Pkubernetes -Pyarn -Pspark-ganglia-lgpl -Pkinesis-asl -Phive-thriftserver -Pspark-thriftserver -Phive"}

 # NOTE: echo "q" is needed because SBT prompts the user for input on encountering a build file
 # with failure (either resolution or compilation); the "q" makes SBT quit.
15 changes: 15 additions & 0 deletions dev/sparktestsupport/modules.py
@@ -170,6 +170,21 @@ def __hash__(self):
     ]
 )

+service = Module(
+    name="service",
+    dependencies=[hive],
+    source_file_regexes=[
+        "sql/service",
+        "sbin/start-thriftserver.sh",
+    ],
+    build_profile_flags=[
+        "-Pspark-thriftserver",
+    ],
+    sbt_test_goals=[
+        "service/test",
+    ]
+)
+
 avro = Module(
     name="avro",
     dependencies=[sql],
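With the module registered, its sbt_test_goals can be run directly; a minimal sketch, assuming build/sbt accepts Maven-style profile flags here the same way it does for the existing hive-thriftserver module:

 # Enable the new profile so the sql/service project is visible, then run its tests.
 ./build/sbt -Pspark-thriftserver "service/test"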
2 changes: 1 addition & 1 deletion dev/test-dependencies.sh
@@ -29,7 +29,7 @@ export LC_ALL=C
 # TODO: This would be much nicer to do in SBT, once SBT supports Maven-style resolution.

 # NOTE: These should match those in the release publishing script
-HADOOP2_MODULE_PROFILES="-Phive-thriftserver -Pmesos -Pkubernetes -Pyarn -Phive"
+HADOOP2_MODULE_PROFILES="-Phive-thriftserver -Pspark-thriftserver -Pmesos -Pkubernetes -Pyarn -Phive"
 MVN="build/mvn"
 HADOOP_PROFILES=(
     hadoop-2.7
@@ -167,6 +167,7 @@ List<String> buildClassPath(String appClassPath) throws IOException {
         "sql/core",
         "sql/hive",
         "sql/hive-thriftserver",
+        "sql/service",
         "streaming"
       );
       if (prependClasses) {
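This list feeds the prependClasses branch just below it, which is controlled by the SPARK_PREPEND_CLASSES environment variable; a short usage sketch for local development:

 # With SPARK_PREPEND_CLASSES set, the launcher puts each listed project's
 # compiled classes (now including sql/service) ahead of the assembly jars,
 # so locally rebuilt classes take effect without repackaging.
 export SPARK_PREPEND_CLASSES=true
 ./bin/spark-shell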
7 changes: 7 additions & 0 deletions pom.xml
@@ -2974,6 +2974,13 @@
     </modules>
   </profile>

+  <profile>
+    <id>spark-thriftserver</id>
+    <modules>
+      <module>sql/service</module>
+    </modules>
+  </profile>
+
   <profile>
     <id>hadoop-cloud</id>
     <modules>
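Because the module sits behind an opt-in profile, builds that omit the flag are unchanged. A minimal sketch of activating it; the -pl/-am scoping is a standard Maven option, not something this PR adds:

 # Build only the new module (plus its upstream dependencies) with the profile on.
 ./build/mvn -Pspark-thriftserver -DskipTests -pl sql/service -am package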
4 changes: 2 additions & 2 deletions project/SparkBuild.scala
@@ -42,8 +42,8 @@ object BuildCommons {

   private val buildLocation = file(".").getAbsoluteFile.getParentFile

-  val sqlProjects@Seq(catalyst, sql, hive, hiveThriftServer, tokenProviderKafka010, sqlKafka010, avro) = Seq(
-    "catalyst", "sql", "hive", "hive-thriftserver", "token-provider-kafka-0-10", "sql-kafka-0-10", "avro"
+  val sqlProjects@Seq(catalyst, sql, hive, hiveThriftServer, service, tokenProviderKafka010, sqlKafka010, avro) = Seq(
+    "catalyst", "sql", "hive", "hive-thriftserver", "service", "token-provider-kafka-0-10", "sql-kafka-0-10", "avro"
   ).map(ProjectRef(buildLocation, _))

   val streamingProjects@Seq(streaming, streamingKafka010) =
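After this change the service ProjectRef is part of sqlProjects, so sbt resolves it whenever the profile is active; a quick hedged check:

 # List sbt projects; "service" should appear with the profile enabled.
 ./build/sbt -Pspark-thriftserver projects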