Skip to content

Commit

Permalink
Revert change in integration tests
Browse files — browse the repository at this point in the history
  • Loading branch information
abellina committed Dec 6, 2021
1 parent 6508550 commit 3f31a67
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 3 deletions.
3 changes: 1 addition & 2 deletions integration_tests/run_pyspark_from_build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -165,14 +165,13 @@ else

export PYSP_TEST_spark_driver_extraClassPath="${ALL_JARS// /:}"
export PYSP_TEST_spark_executor_extraClassPath="${ALL_JARS// /:}"
export PYSP_TEST_spark_driver_extraJavaOptions="-ea -Duser.timezone=UTC $COVERAGE_SUBMIT_FLAGS -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5010"
export PYSP_TEST_spark_driver_extraJavaOptions="-ea -Duser.timezone=UTC $COVERAGE_SUBMIT_FLAGS"
export PYSP_TEST_spark_executor_extraJavaOptions='-ea -Duser.timezone=UTC'
export PYSP_TEST_spark_ui_showConsoleProgress='false'
export PYSP_TEST_spark_sql_session_timeZone='UTC'
export PYSP_TEST_spark_sql_shuffle_partitions='12'
# prevent the cluster shape from changing
export PYSP_TEST_spark_dynamicAllocation_enabled='false'
export PYSP_TEST_spark_rapids_sql_explain='ALL'

# Extract Databricks version from deployed configs. This is set automatically on Databricks
# notebooks but not when running Spark manually.
Expand Down
2 changes: 1 addition & 1 deletion integration_tests/src/main/python/spark_session.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ def with_gpu_session(func, conf={}):
if is_allowing_any_non_gpu():
copy['spark.rapids.sql.test.enabled'] = 'false'
else:
copy['spark.rapids.sql.test.enabled'] = 'false'
copy['spark.rapids.sql.test.enabled'] = 'true'
copy['spark.rapids.sql.test.allowedNonGpu'] = ','.join(get_non_gpu_allowed())

copy['spark.rapids.sql.test.validateExecsInGpuPlan'] = ','.join(get_validate_execs_in_gpu_plan())
Expand Down

0 comments on commit 3f31a67

Please sign in to comment.