From 0053d9a1d62c92bc3b339faf517cf31c53c673de Mon Sep 17 00:00:00 2001
From: "chenliang.lu" <31469905+yabola@users.noreply.github.com>
Date: Tue, 25 Aug 2020 07:59:28 +0800
Subject: [PATCH] KE-16978 revert cast filter push down (#166)

* KE-16978 revert cast filter push down

* release r46 (#154)

Co-authored-by: chenliang.lu
Co-authored-by: Mingming Ge
---
 .../datasources/DataSourceStrategy.scala      | 28 -------------------
 .../execution/ExchangeCoordinatorSuite.scala  |  2 +-
 .../execution/adaptive/QueryStageSuite.scala  |  2 +-
 3 files changed, 2 insertions(+), 30 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala
index 5dbfee598a66b..c6000442fae76 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala
@@ -440,12 +440,8 @@ object DataSourceStrategy {
     predicate match {
       case expressions.EqualTo(a: Attribute, Literal(v, t)) =>
         Some(sources.EqualTo(a.name, convertToScala(v, t)))
-      case expressions.EqualTo(expressions.Cast(a: Attribute, _, _), Literal(v, t)) =>
-        Some(sources.EqualTo(a.name, convertToScala(v, t)))
       case expressions.EqualTo(Literal(v, t), a: Attribute) =>
         Some(sources.EqualTo(a.name, convertToScala(v, t)))
-      case expressions.EqualTo(Literal(v, t), expressions.Cast(a: Attribute, _, _)) =>
-        Some(sources.EqualTo(a.name, convertToScala(v, t)))

       case expressions.EqualNullSafe(a: Attribute, Literal(v, t)) =>
         Some(sources.EqualNullSafe(a.name, convertToScala(v, t)))
@@ -457,45 +453,21 @@ object DataSourceStrategy {
       case expressions.GreaterThan(Literal(v, t), a: Attribute) =>
         Some(sources.LessThan(a.name, convertToScala(v, t)))

-
-      case expressions.GreaterThan(expressions.Cast(a: Attribute, _, _), Literal(v, t)) =>
-        Some(sources.GreaterThan(a.name, convertToScala(v, t)))
-      case expressions.GreaterThan(Literal(v, t), expressions.Cast(a: Attribute, _, _)) =>
-        Some(sources.LessThan(a.name, convertToScala(v, t)))
-
       case expressions.LessThan(a: Attribute, Literal(v, t)) =>
         Some(sources.LessThan(a.name, convertToScala(v, t)))
       case expressions.LessThan(Literal(v, t), a: Attribute) =>
         Some(sources.GreaterThan(a.name, convertToScala(v, t)))

-      case expressions.LessThan(expressions.Cast(a: Attribute, _, _), Literal(v, t)) =>
-        Some(sources.LessThan(a.name, convertToScala(v, t)))
-      case expressions.LessThan(Literal(v, t), expressions.Cast(a: Attribute, _, _)) =>
-        Some(sources.GreaterThan(a.name, convertToScala(v, t)))
-
       case expressions.GreaterThanOrEqual(a: Attribute, Literal(v, t)) =>
         Some(sources.GreaterThanOrEqual(a.name, convertToScala(v, t)))
-
       case expressions.GreaterThanOrEqual(Literal(v, t), a: Attribute) =>
         Some(sources.LessThanOrEqual(a.name, convertToScala(v, t)))

-
-      case expressions.GreaterThanOrEqual(expressions.Cast(a: Attribute, _, _), Literal(v, t)) =>
-        Some(sources.GreaterThanOrEqual(a.name, convertToScala(v, t)))
-
-      case expressions.GreaterThanOrEqual(Literal(v, t), expressions.Cast(a: Attribute, _, _)) =>
-        Some(sources.LessThanOrEqual(a.name, convertToScala(v, t)))
-
       case expressions.LessThanOrEqual(a: Attribute, Literal(v, t)) =>
         Some(sources.LessThanOrEqual(a.name, convertToScala(v, t)))
       case expressions.LessThanOrEqual(Literal(v, t), a: Attribute) =>
         Some(sources.GreaterThanOrEqual(a.name, convertToScala(v, t)))

-      case expressions.LessThanOrEqual(expressions.Cast(a: Attribute, _, _), Literal(v, t)) =>
-        Some(sources.LessThanOrEqual(a.name, convertToScala(v, t)))
-      case expressions.LessThanOrEqual(Literal(v, t), expressions.Cast(a: Attribute, _, _)) =>
-        Some(sources.GreaterThanOrEqual(a.name, convertToScala(v, t)))
-
       case expressions.InSet(a: Attribute, set) =>
         val toScala = CatalystTypeConverters.createToScalaConverter(a.dataType)
         Some(sources.In(a.name, set.toArray.map(toScala)))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/ExchangeCoordinatorSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/ExchangeCoordinatorSuite.scala
index d18e72226462f..14cfd55a2c398 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/ExchangeCoordinatorSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/ExchangeCoordinatorSuite.scala
@@ -390,7 +390,7 @@ class ExchangeCoordinatorSuite extends SparkFunSuite with BeforeAndAfterAll {
       .setAppName("test")
       .set("spark.ui.enabled", "true")
       .set("spark.driver.allowMultipleContexts", "true")
-      .set(SQLConf.SHUFFLE_MAX_NUM_POSTSHUFFLE_PARTITIONS.key, "5")
+      .set(SQLConf.SHUFFLE_PARTITIONS.key, "5")
       .set(SQLConf.ADAPTIVE_EXECUTION_ENABLED.key, "true")
       .set(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key, "-1")
       .set(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/QueryStageSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/QueryStageSuite.scala
index 8b0339d2013da..d1c83ef03bd25 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/QueryStageSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/QueryStageSuite.scala
@@ -51,7 +51,7 @@ class QueryStageSuite extends SparkFunSuite with BeforeAndAfterAll {
      .appName("test")
      .config("spark.ui.enabled", "true")
      .config("spark.driver.allowMultipleContexts", "true")
-      .config(SQLConf.SHUFFLE_MAX_NUM_POSTSHUFFLE_PARTITIONS.key, "5")
+      .config(SQLConf.SHUFFLE_PARTITIONS.key, "5")
      .config(config.SHUFFLE_STATISTICS_VERBOSE.key, "true")
      .config(SQLConf.ADAPTIVE_EXECUTION_ENABLED.key, "true")
      .config(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key, "-1")
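
Note (after the diff, not part of the applied patch): with the Cast cases removed, translateFilter only matches comparisons against a bare Attribute, so a predicate whose column is wrapped in a Cast is no longer turned into a data source Filter and is instead evaluated by Spark after the scan. The following is an illustrative sketch of how that behaviour can be observed from the DataFrame API; the object name, Parquet path, and column name are hypothetical, and explain() is used because translateFilter itself is not generally reachable from user code.

// Sketch only: shows pushed vs. non-pushed filters in the physical plan.
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

object CastPushdownCheck {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("cast-pushdown-check")
      .getOrCreate()
    import spark.implicits._

    // Small Parquet table with an integer column (hypothetical path).
    Seq(1, 2, 3, 4, 5).toDF("id").write.mode("overwrite").parquet("/tmp/pushdown_demo")
    val df = spark.read.parquet("/tmp/pushdown_demo")

    // Bare attribute comparison: translated to a source filter, so the scan
    // should report it in PushedFilters (e.g. EqualTo(id,5)).
    df.filter(col("id") === 5).explain()

    // Cast-wrapped attribute: after this revert nothing is translated, so the
    // predicate stays as a Filter node above the scan and PushedFilters stays empty.
    df.filter(col("id").cast("string") === "5").explain()

    spark.stop()
  }
}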