diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/StringTypeCollated.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/StringTypeCollated.scala
index 2f66e57956344..67b65859e6bbb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/StringTypeCollated.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/StringTypeCollated.scala
@@ -24,13 +24,13 @@ import org.apache.spark.sql.types.{AbstractDataType, DataType, StringType}
  */
 abstract class StringTypeCollated extends AbstractDataType {
   override private[sql] def defaultConcreteType: DataType = StringType
+  override private[sql] def simpleString: String = "string"
 }
 
 /**
  * Use StringTypeBinary for expressions supporting only binary collation.
  */
 case object StringTypeBinary extends StringTypeCollated {
-  override private[sql] def simpleString: String = "string_binary"
   override private[sql] def acceptsType(other: DataType): Boolean =
     other.isInstanceOf[StringType] && other.asInstanceOf[StringType].supportsBinaryEquality
 }
@@ -39,7 +39,6 @@ case object StringTypeBinary extends StringTypeCollated {
  * Use StringTypeBinaryLcase for expressions supporting only binary and lowercase collation.
  */
 case object StringTypeBinaryLcase extends StringTypeCollated {
-  override private[sql] def simpleString: String = "string_binary_lcase"
   override private[sql] def acceptsType(other: DataType): Boolean =
     other.isInstanceOf[StringType] && (other.asInstanceOf[StringType].supportsBinaryEquality ||
       other.asInstanceOf[StringType].isUTF8BinaryLcaseCollation)
@@ -49,6 +48,5 @@ case object StringTypeBinaryLcase extends StringTypeCollated {
  * Use StringTypeAnyCollation for expressions supporting all possible collation types.
  */
 case object StringTypeAnyCollation extends StringTypeCollated {
-  override private[sql] def simpleString: String = "string_any_collation"
   override private[sql] def acceptsType(other: DataType): Boolean = other.isInstanceOf[StringType]
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
index cda9676ca58b5..1fbd1ac9a29fd 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
@@ -70,7 +70,7 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
       errorSubClass = "UNEXPECTED_INPUT_TYPE",
       messageParameters = Map(
         "paramIndex" -> ordinalNumber(0),
-        "requiredType" -> "(\"STRING_ANY_COLLATION\" or \"BINARY\" or \"ARRAY\")",
+        "requiredType" -> "(\"STRING\" or \"BINARY\" or \"ARRAY\")",
         "inputSql" -> "\"1\"",
         "inputType" -> "\"INT\""
       )
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CollationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CollationSuite.scala
index 6163aff662882..62150eaeac54d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CollationSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CollationSuite.scala
@@ -127,7 +127,7 @@ class CollationSuite extends DatasourceV2SQLBase with AdaptiveSparkPlanHelper {
         "paramIndex" -> "first",
         "inputSql" -> "\"1\"",
         "inputType" -> "\"INT\"",
-        "requiredType" -> "\"STRING_ANY_COLLATION\""),
+        "requiredType" -> "\"STRING\""),
       context = ExpectedContext(
         fragment = s"collate(1, 'UTF8_BINARY')", start = 7, stop = 31))
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
index 5beac33703586..e42f397cbfc29 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
@@ -1713,7 +1713,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
         "paramIndex" -> "second",
         "inputSql" -> "\"1\"",
         "inputType" -> "\"INT\"",
-        "requiredType" -> "\"STRING_ANY_COLLATION\""
+        "requiredType" -> "\"STRING\""
       ),
       queryContext = Array(ExpectedContext("", "", 0, 15, "array_join(x, 1)"))
     )
@@ -1727,7 +1727,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
         "paramIndex" -> "third",
         "inputSql" -> "\"1\"",
         "inputType" -> "\"INT\"",
-        "requiredType" -> "\"STRING_ANY_COLLATION\""
+        "requiredType" -> "\"STRING\""
       ),
       queryContext = Array(ExpectedContext("", "", 0, 21, "array_join(x, ', ', 1)"))
     )
@@ -1987,7 +1987,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
         "paramIndex" -> "first",
         "inputSql" -> "\"struct(1, a)\"",
         "inputType" -> "\"STRUCT\"",
-        "requiredType" -> "(\"STRING_ANY_COLLATION\" or \"ARRAY\")"
+        "requiredType" -> "(\"STRING\" or \"ARRAY\")"
       ),
       queryContext = Array(ExpectedContext("", "", 7, 29, "reverse(struct(1, 'a'))"))
     )
@@ -2002,7 +2002,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
         "paramIndex" -> "first",
         "inputSql" -> "\"map(1, a)\"",
         "inputType" -> "\"MAP\"",
-        "requiredType" -> "(\"STRING_ANY_COLLATION\" or \"ARRAY\")"
+        "requiredType" -> "(\"STRING\" or \"ARRAY\")"
      ),
       queryContext = Array(ExpectedContext("", "", 7, 26, "reverse(map(1, 'a'))"))
     )
@@ -2552,7 +2552,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "sqlExpr" -> "\"concat(map(1, 2), map(3, 4))\"",
         "paramIndex" -> "first",
-        "requiredType" -> "(\"STRING_ANY_COLLATION\" or \"BINARY\" or \"ARRAY\")",
+        "requiredType" -> "(\"STRING\" or \"BINARY\" or \"ARRAY\")",
         "inputSql" -> "\"map(1, 2)\"",
         "inputType" -> "\"MAP\""
       ),