[NSE-196] clean up configs in unit tests (#271)
* remove testing config

* remove unused configs
Authored by rui-mo on Apr 22, 2021
Parent: 7c382d5 · Commit: 7e7d8c2
Showing 216 changed files with 366 additions and 1,083 deletions.
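
Beyond dropping no-op .set(...) calls from the test suites, the substantive change is that the spark.oap.sql.columnar.testing helper flag is removed entirely: suites that used it to suppress ColumnarBatchScanExec now state that intent through the ordinary batch-scan key, as CachedTableSuite and DataFrameJoinSuite do below. A minimal Scala sketch of the swap (illustrative only, drawn from the hunks that follow):

import org.apache.spark.SparkConf

// Illustration only: the same intent expressed before and after this commit.
val before = new SparkConf()
  .set("spark.oap.sql.columnar.testing", "true")    // helper flag, removed by this commit
  .set("spark.oap.sql.columnar.batchscan", "false")
val after = new SparkConf()
  .set("spark.oap.sql.columnar.batchscan", "false") // the regular key alone carries the intent
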
@@ -60,17 +60,11 @@ case class ColumnarGuardRule(conf: SparkConf) extends Rule[SparkPlan] {
val enableColumnarShuffledHashJoin = columnarConf.enableColumnarShuffledHashJoin
val enableColumnarBroadcastExchange = columnarConf.enableColumnarBroadcastExchange
val enableColumnarBroadcastJoin = columnarConf.enableColumnarBroadcastJoin

val testing = columnarConf.isTesting

private def tryConvertToColumnar(plan: SparkPlan): Boolean = {
try {
val columnarPlan = plan match {
case plan: BatchScanExec =>
if (testing) {
// disable ColumnarBatchScanExec according to config
return false
}
if (!enableColumnarBatchScan) return false
new ColumnarBatchScanExec(plan.output, plan.scan)
case plan: FileSourceScanExec =>
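
With the testing shortcut removed, the BatchScanExec arm of tryConvertToColumnar is gated only by spark.oap.sql.columnar.batchscan. The following is a condensed sketch of that guard logic, assuming the usual try/fallback structure around the match shown above and the class's existing fields; it is not the file's exact body:

// Condensed sketch (assumed structure): build a columnar operator when allowed,
// return false to fall back to row-based execution when disabled or on failure.
private def tryConvertToColumnar(plan: SparkPlan): Boolean = {
  try {
    plan match {
      case p: BatchScanExec =>
        if (!enableColumnarBatchScan) return false  // the only gate left after this commit
        new ColumnarBatchScanExec(p.output, p.scan)
      case _ =>                                     // other operators follow the same pattern
    }
    true
  } catch {
    case _: Throwable => false                      // construction failed: stay row-based
  }
}
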
@@ -35,7 +35,6 @@ import org.apache.spark.sql.internal.SQLConf
case class ColumnarPreOverrides(conf: SparkConf) extends Rule[SparkPlan] {
val columnarConf: ColumnarPluginConfig = ColumnarPluginConfig.getSessionConf
var isSupportAdaptive: Boolean = true
val testing: Boolean = columnarConf.isTesting

def replaceWithColumnarPlan(plan: SparkPlan): SparkPlan = plan match {
case RowGuard(child: CustomShuffleReaderExec) =>
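
For context, ColumnarPreOverrides is a catalyst Rule over the physical plan: each operator is pattern-matched and the tree is rewritten recursively (the RowGuard case above pins its child to row-based execution). The hypothetical rule below shows the same shape without doing any replacement; it is not the plugin's implementation:

import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution.SparkPlan

// Hypothetical identity rule: walks the physical plan the way ColumnarPreOverrides
// does, but replaces nothing.
case class NoOpColumnarRule() extends Rule[SparkPlan] {
  override def apply(plan: SparkPlan): SparkPlan =
    plan.withNewChildren(plan.children.map(apply))
}
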
@@ -143,10 +143,6 @@ class ColumnarPluginConfig(conf: SQLConf) extends Logging {
// The supported customized compression codec is lz4 and fastpfor.
val columnarShuffleUseCustomizedCompressionCodec: String =
conf.getConfString("spark.oap.sql.columnar.shuffle.customizedCompression.codec", "lz4")

// a helper flag to check if it's in unit test
val isTesting: Boolean =
conf.getConfString("spark.oap.sql.columnar.testing", "false").toBoolean

val numaBindingInfo: ColumnarNumaBindingInfo = {
val enableNumaBinding: Boolean =
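
Every option in ColumnarPluginConfig, including the deleted isTesting, follows the same pattern: a SQLConf.getConfString lookup with a string default, converted to the target type. A minimal sketch of reading such a flag (the key name here is invented purely for illustration):

import org.apache.spark.sql.internal.SQLConf

// Illustration only: "spark.oap.sql.columnar.exampleFlag" is a made-up key.
// getConfString(key, default) is the SQLConf call used throughout ColumnarPluginConfig.
val conf: SQLConf = SQLConf.get
val exampleFlag: Boolean =
  conf.getConfString("spark.oap.sql.columnar.exampleFlag", "false").toBoolean
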
@@ -35,11 +35,8 @@ class SingleLevelAggregateHashMapSuite extends DataFrameAggregateSuite with Befo
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
@@ -69,11 +66,8 @@ class TwoLevelAggregateHashMapSuite extends DataFrameAggregateSuite with BeforeA
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
@@ -105,11 +99,8 @@ class TwoLevelAggregateHashMapWithVectorizedMapSuite
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
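
After this cleanup, all three aggregate hash-map suites in this file share the same trimmed sparkConf override. The sketch below shows the resulting shape; the suite name is hypothetical, and the keys are the ones these hunks keep:

import org.apache.spark.SparkConf
import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.test.SharedSparkSession

// Hypothetical suite showing the trimmed config block these hunks leave behind.
class TrimmedConfExampleSuite extends QueryTest with SharedSparkSession {
  override protected def sparkConf: SparkConf =
    super.sparkConf
      .set("spark.memory.offHeap.enabled", "true")
      .set("spark.memory.offHeap.size", "50m")
      .set("spark.sql.join.preferSortMergeJoin", "false")
      .set("spark.unsafe.exceptionOnMemoryLeak", "false")
      .set("spark.sql.columnar.sort.broadcastJoin", "true")
      .set("spark.oap.sql.columnar.preferColumnar", "true")
      .set("spark.oap.sql.columnar.sortmergejoin", "true")
}
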
@@ -38,11 +38,8 @@ class ApproxCountDistinctForIntervalsQuerySuite extends QueryTest with SharedSpa
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
@@ -43,11 +43,8 @@ class ApproximatePercentileQuerySuite extends QueryTest with SharedSparkSession
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
@@ -58,18 +58,15 @@ class CachedTableSuite extends QueryTest with SQLTestUtils
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
.set("spark.sql.parquet.enableVectorizedReader", "false")
.set("spark.sql.orc.enableVectorizedReader", "false")
.set("spark.sql.inMemoryColumnarStorage.enableVectorizedReader", "false")
.set("spark.oap.sql.columnar.testing", "true")
.set("spark.oap.sql.columnar.batchscan", "false")

setupTestData()

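
CachedTableSuite keeps spark.oap.sql.columnar.batchscan set to false while dropping the now-removed testing flag, so scans in this suite stay row-based. One illustrative way to assert that from inside such a suite (not part of this commit; it assumes the suite's spark session and the testData view created by setupTestData()):

// Illustrative check only: with batchscan disabled, the executed plan should not
// contain the plugin's ColumnarBatchScanExec operator.
val executed = spark.table("testData").queryExecution.executedPlan
assert(!executed.toString.contains("ColumnarBatchScanExec"))
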
@@ -47,11 +47,8 @@ class ColumnExpressionSuite extends QueryTest with SharedSparkSession {
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
@@ -35,11 +35,8 @@ class ComplexTypesSuite extends QueryTest with SharedSparkSession {
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
@@ -39,11 +39,8 @@ class ConfigBehaviorSuite extends QueryTest with SharedSparkSession {
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
@@ -37,11 +37,8 @@ class CountMinSketchAggQuerySuite extends QueryTest with SharedSparkSession {
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
@@ -42,11 +42,8 @@ class CsvFunctionsSuite extends QueryTest with SharedSparkSession {
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
@@ -51,11 +51,8 @@ class DataFrameAggregateSuite extends QueryTest
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
@@ -43,11 +43,8 @@ class DataFrameComplexTypeSuite extends QueryTest with SharedSparkSession {
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
@@ -51,11 +51,8 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
@@ -37,11 +37,8 @@ class DataFrameHintSuite extends AnalysisTest with SharedSparkSession {
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
@@ -34,11 +34,8 @@ class DataFrameImplicitsSuite extends QueryTest with SharedSparkSession {
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
@@ -48,15 +48,12 @@ class DataFrameJoinSuite extends QueryTest
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
.set("spark.oap.sql.columnar.testing", "true")
.set("spark.oap.sql.columnar.batchscan", "false")

test("join - join using") {
val df = Seq(1, 2, 3).map(i => (i, i.toString)).toDF("int", "str")
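
The test above is cut off by the diff view. A hedged sketch of how a join-using test of this shape typically continues, assuming the suite's usual checkAnswer helper and implicits; the second DataFrame and the expected rows are illustrative rather than the file's actual contents:

// Illustrative continuation only, not the real test body.
val df2 = Seq(1, 2, 3).map(i => (i, (i + 1).toString)).toDF("int", "str")
checkAnswer(
  df.join(df2, "int"),  // join using the shared "int" column
  Row(1, "1", "2") :: Row(2, "2", "3") :: Row(3, "3", "4") :: Nil)
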
@@ -38,11 +38,8 @@ class DataFrameNaFunctionsSuite extends QueryTest with SharedSparkSession {
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
@@ -40,11 +40,8 @@ class DataFramePivotSuite extends QueryTest with SharedSparkSession {
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
@@ -41,11 +41,8 @@ class DataFrameRangeSuite extends QueryTest with SharedSparkSession with Eventua
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
@@ -37,11 +37,8 @@ class DataFrameSelfJoinSuite extends QueryTest with SharedSparkSession {
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
@@ -41,11 +41,8 @@ class DataFrameSetOperationsSuite extends QueryTest with SharedSparkSession {
.set("spark.memory.offHeap.enabled", "true")
.set("spark.memory.offHeap.size", "50m")
.set("spark.sql.join.preferSortMergeJoin", "false")
.set("spark.sql.columnar.codegen.hashAggregate", "false")
.set("spark.oap.sql.columnar.wholestagecodegen", "true")
.set("spark.sql.columnar.window", "true")
.set("spark.unsafe.exceptionOnMemoryLeak", "false")
//.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
//.set("spark.oap.sql.columnar.tmp_dir", "/codegen/nativesql/")
.set("spark.sql.columnar.sort.broadcastJoin", "true")
.set("spark.oap.sql.columnar.preferColumnar", "true")
.set("spark.oap.sql.columnar.sortmergejoin", "true")
(Diff truncated; the remaining changed files are not shown here.)