From 8848af263570a82a985037a95468efff050c61b0 Mon Sep 17 00:00:00 2001 From: HyukjinKwon Date: Wed, 28 Aug 2019 10:39:21 +0900 Subject: [PATCH] [SPARK-28881][PYTHON][TESTS][FOLLOW-UP] Use SparkSession(SparkContext(...)) to prevent the Spark conf from affecting other tests ### What changes were proposed in this pull request? This PR proposes to match the test with branch-2.4. See https://github.com/apache/spark/pull/25593#discussion_r318109047 It seems that using `SparkSession.builder` with a Spark conf can affect other tests. ### Why are the changes needed? To match with branch-2.4 and to make it easier to backport. ### Does this PR introduce any user-facing change? No. ### How was this patch tested? The test was fixed. Closes #25603 from HyukjinKwon/SPARK-28881-followup. Authored-by: HyukjinKwon Signed-off-by: HyukjinKwon --- python/pyspark/sql/tests/test_arrow.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/python/pyspark/sql/tests/test_arrow.py b/python/pyspark/sql/tests/test_arrow.py index 50c82b0b5f88a..f32513771cbcf 100644 --- a/python/pyspark/sql/tests/test_arrow.py +++ b/python/pyspark/sql/tests/test_arrow.py @@ -22,6 +22,7 @@ import unittest import warnings +from pyspark import SparkContext, SparkConf from pyspark.sql import Row, SparkSession from pyspark.sql.functions import udf from pyspark.sql.types import * @@ -430,11 +431,8 @@ class MaxResultArrowTests(unittest.TestCase): @classmethod def setUpClass(cls): - cls.spark = SparkSession.builder \ - .master("local[4]") \ - .appName(cls.__name__) \ - .config("spark.driver.maxResultSize", "10k") \ - .getOrCreate() + cls.spark = SparkSession(SparkContext( + 'local[4]', cls.__name__, conf=SparkConf().set("spark.driver.maxResultSize", "10k"))) # Explicitly enable Arrow and disable fallback. cls.spark.conf.set("spark.sql.execution.arrow.pyspark.enabled", "true")