diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CsvExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CsvExpressionsSuite.scala index a89cb58c3e03b..fe1ee0e6f338b 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CsvExpressionsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CsvExpressionsSuite.scala @@ -228,13 +228,13 @@ class CsvExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper with P } test("verify corrupt column") { - checkExceptionInExpression[AnalysisException]( + checkErrorInExpression[AnalysisException]( CsvToStructs( schema = StructType.fromDDL("i int, _unparsed boolean"), options = Map("columnNameOfCorruptRecord" -> "_unparsed"), child = Literal.create("a"), - timeZoneId = UTC_OPT), - expectedErrMsg = "The field for corrupt records must be string type and nullable") + timeZoneId = UTC_OPT), null, "INVALID_CORRUPT_RECORD_TYPE", + Map("columnName" -> "`_unparsed`", "actualType" -> "\"BOOLEAN\"")) } test("from/to csv with intervals") { diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/JsonExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/JsonExpressionsSuite.scala index adb39fcd568c9..0afaf4ec097c8 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/JsonExpressionsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/JsonExpressionsSuite.scala @@ -791,13 +791,13 @@ class JsonExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper with } test("verify corrupt column") { - checkExceptionInExpression[AnalysisException]( + checkErrorInExpression[AnalysisException]( JsonToStructs( schema = StructType.fromDDL("i int, _unparsed boolean"), options = Map("columnNameOfCorruptRecord" -> "_unparsed"), child = Literal.create("""{"i":"a"}"""), - timeZoneId = 
UTC_OPT), - expectedErrMsg = "The field for corrupt records must be string type and nullable") + timeZoneId = UTC_OPT), null, "INVALID_CORRUPT_RECORD_TYPE", + Map("columnName" -> "`_unparsed`", "actualType" -> "\"BOOLEAN\"")) } def decimalInput(langTag: String): (Decimal, String) = { diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/XmlExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/XmlExpressionsSuite.scala index 66baf6b1430fa..4f38cd0630f2c 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/XmlExpressionsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/XmlExpressionsSuite.scala @@ -391,13 +391,13 @@ class XmlExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper with P } test("verify corrupt column") { - checkExceptionInExpression[AnalysisException]( - XmlToStructs( + checkErrorInExpression[AnalysisException]( + XmlToStructs( schema = StructType.fromDDL("i int, _unparsed boolean"), options = Map("columnNameOfCorruptRecord" -> "_unparsed"), child = Literal.create("""{"i":"a"}"""), - timeZoneId = UTC_OPT), - expectedErrMsg = "The field for corrupt records must be string type and nullable") + timeZoneId = UTC_OPT), null, "INVALID_CORRUPT_RECORD_TYPE", + Map("columnName" -> "`_unparsed`", "actualType" -> "\"BOOLEAN\"")) } def decimalInput(langTag: String): (Decimal, String) = {