[SPARK-47258][SQL] Assign names to error classes _LEGACY_ERROR_TEMP_127[0-5]

### What changes were proposed in this pull request?

This PR renames the error classes related to the `SHOW CREATE TABLE` command:

- `_LEGACY_ERROR_TEMP_1270` => `UNSUPPORTED_SHOW_CREATE_TABLE.ON_TEMPORARY_VIEW`
- `_LEGACY_ERROR_TEMP_1271` => `UNSUPPORTED_SHOW_CREATE_TABLE.WITH_UNSUPPORTED_FEATURE`
- `_LEGACY_ERROR_TEMP_1272` => `UNSUPPORTED_SHOW_CREATE_TABLE.ON_TRANSACTIONAL_HIVE_TABLE`
- `_LEGACY_ERROR_TEMP_1273` => `UNSUPPORTED_SHOW_CREATE_TABLE.WITH_UNSUPPORTED_SERDE_CONFIGURATION`
- `_LEGACY_ERROR_TEMP_1274` => `UNSUPPORTED_SHOW_CREATE_TABLE.ON_DATA_SOURCE_TABLE_WITH_AS_SERDE`
- `_LEGACY_ERROR_TEMP_1275` => `UNSUPPORTED_SHOW_CREATE_TABLE.WITH_UNSUPPORTED_FEATURE`

This PR also updates the tests in the corresponding test suites to use the `checkError()` method, which verifies the error class name, SQLSTATE, query context, and error message parameters (see the sketch below).
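A minimal sketch of that `checkError()` pattern, mirroring the updated `HiveSQLViewSuite` and `ShowCreateTableSuiteBase` assertions in this diff. The suite name, the temporary view name, and the expected `tableName` value are illustrative and not part of the change:

```scala
import org.apache.spark.sql.{AnalysisException, QueryTest}
import org.apache.spark.sql.test.SharedSparkSession

// Hypothetical suite; it only demonstrates the checkError() pattern used by the updated tests.
class ShowCreateTableErrorSketchSuite extends QueryTest with SharedSparkSession {
  test("SHOW CREATE TABLE on a temporary view raises the renamed error class") {
    spark.range(1).createOrReplaceTempView("v")
    checkError(
      exception = intercept[AnalysisException] {
        spark.sql("SHOW CREATE TABLE v")
      },
      // Renamed error class and its SQLSTATE, as defined in error-conditions.json.
      errorClass = "UNSUPPORTED_SHOW_CREATE_TABLE.ON_TEMPORARY_VIEW",
      sqlState = "0A000",
      // Message parameters are matched by name; the exact quoting of the view name is illustrative.
      parameters = Map("tableName" -> "`v`"))
  }
}
```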

### Why are the changes needed?

Proper error names and messages improve the user experience with Spark SQL.

### Does this PR introduce _any_ user-facing change?

Yes, this PR changes the user-facing error classes and messages.
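For illustration, here is the difference for `SHOW CREATE TABLE ... AS SERDE` on a Spark data source table, assembled from the old and new message templates in this diff; the table name is taken from the updated `ShowCreateTableSuiteBase` test and is illustrative:

```scala
// In spark-shell (where `spark` is predefined), assuming ns1.tbl exists as a Spark data
// source table, the statement below throws an AnalysisException.
spark.sql("SHOW CREATE TABLE ns1.tbl AS SERDE")

// Before (_LEGACY_ERROR_TEMP_1274):
//   `spark_catalog`.`ns1`.`tbl` is a Spark data source table. Use `SHOW CREATE TABLE`
//   without `AS SERDE` instead.

// After (UNSUPPORTED_SHOW_CREATE_TABLE.ON_DATA_SOURCE_TABLE_WITH_AS_SERDE, SQLSTATE 0A000):
//   Unsupported a SHOW CREATE TABLE command. The table `spark_catalog`.`ns1`.`tbl` is a
//   Spark data source table. Please use SHOW CREATE TABLE without AS SERDE instead.
```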

### How was this patch tested?

By running the tests from `ShowCreateTableSuiteBase` and its subclasses.

### Was this patch authored or co-authored using generative AI tooling?

No

Closes #46770 from wayneguow/SPARK-47258.

Authored-by: Wei Guo <guow93@gmail.com>
Signed-off-by: Max Gekk <max.gekk@gmail.com>
wayneguow authored and MaxGekk committed Aug 26, 2024
1 parent b1ddec5 commit 7e4d6bd
Showing 6 changed files with 74 additions and 76 deletions.
72 changes: 37 additions & 35 deletions common/utils/src/main/resources/error/error-conditions.json
@@ -5081,6 +5081,43 @@
},
"sqlState" : "0A000"
},
"UNSUPPORTED_SHOW_CREATE_TABLE" : {
"message" : [
"Unsupported a SHOW CREATE TABLE command."
],
"subClass" : {
"ON_DATA_SOURCE_TABLE_WITH_AS_SERDE" : {
"message" : [
"The table <tableName> is a Spark data source table. Please use SHOW CREATE TABLE without AS SERDE instead."
]
},
"ON_TEMPORARY_VIEW" : {
"message" : [
"The command is not supported on a temporary view <tableName>."
]
},
"ON_TRANSACTIONAL_HIVE_TABLE" : {
"message" : [
"Failed to execute the command against transactional Hive table <tableName>.",
"Please use SHOW CREATE TABLE <tableName> AS SERDE to show Hive DDL instead."
]
},
"WITH_UNSUPPORTED_FEATURE" : {
"message" : [
"Failed to execute the command against table/view <tableName> which is created by Hive and uses the following unsupported features",
"<unsupportedFeatures>"
]
},
"WITH_UNSUPPORTED_SERDE_CONFIGURATION" : {
"message" : [
"Failed to execute the command against the table <tableName> which is created by Hive and uses the following unsupported serde configuration",
"<configs>",
"Please use SHOW CREATE TABLE <tableName> AS SERDE to show Hive DDL instead."
]
}
},
"sqlState" : "0A000"
},
"UNSUPPORTED_STREAMING_OPERATOR_WITHOUT_WATERMARK" : {
"message" : [
"<outputMode> output mode not supported for <statefulOperator> on streaming DataFrames/DataSets without watermark."
@@ -6302,41 +6339,6 @@
"Failed to truncate table <tableIdentWithDB> when removing data of the path: <path>."
]
},
"_LEGACY_ERROR_TEMP_1270" : {
"message" : [
"SHOW CREATE TABLE is not supported on a temporary view: <table>."
]
},
"_LEGACY_ERROR_TEMP_1271" : {
"message" : [
"Failed to execute SHOW CREATE TABLE against table <table>, which is created by Hive and uses the following unsupported feature(s)",
"<unsupportedFeatures>",
"Please use `SHOW CREATE TABLE <table> AS SERDE` to show Hive DDL instead."
]
},
"_LEGACY_ERROR_TEMP_1272" : {
"message" : [
"SHOW CREATE TABLE doesn't support transactional Hive table. Please use `SHOW CREATE TABLE <table> AS SERDE` to show Hive DDL instead."
]
},
"_LEGACY_ERROR_TEMP_1273" : {
"message" : [
"Failed to execute SHOW CREATE TABLE against table <table>, which is created by Hive and uses the following unsupported serde configuration",
"<configs>",
"Please use `SHOW CREATE TABLE <table> AS SERDE` to show Hive DDL instead."
]
},
"_LEGACY_ERROR_TEMP_1274" : {
"message" : [
"<table> is a Spark data source table. Use `SHOW CREATE TABLE` without `AS SERDE` instead."
]
},
"_LEGACY_ERROR_TEMP_1275" : {
"message" : [
"Failed to execute SHOW CREATE TABLE against table/view <table>, which is created by Hive and uses the following unsupported feature(s)",
"<features>."
]
},
"_LEGACY_ERROR_TEMP_1276" : {
"message" : [
"The logical plan that represents the view is not analyzed."
@@ -19,8 +19,6 @@ package org.apache.spark.sql.errors

import java.util.Locale

import scala.collection.mutable

import org.apache.hadoop.fs.Path

import org.apache.spark.{SPARK_DOC_ROOT, SparkException, SparkThrowable, SparkUnsupportedOperationException}
@@ -2959,49 +2957,41 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat

def showCreateTableNotSupportedOnTempView(table: String): Throwable = {
new AnalysisException(
errorClass = "_LEGACY_ERROR_TEMP_1270",
messageParameters = Map("table" -> table))
}

def showCreateTableFailToExecuteUnsupportedFeatureError(table: CatalogTable): Throwable = {
new AnalysisException(
errorClass = "_LEGACY_ERROR_TEMP_1271",
messageParameters = Map(
"unsupportedFeatures" -> table.unsupportedFeatures.map(" - " + _).mkString("\n"),
"table" -> table.identifier.toString))
errorClass = "UNSUPPORTED_SHOW_CREATE_TABLE.ON_TEMPORARY_VIEW",
messageParameters = Map("tableName" -> toSQLId(table)))
}

def showCreateTableNotSupportTransactionalHiveTableError(table: CatalogTable): Throwable = {
new AnalysisException(
errorClass = "_LEGACY_ERROR_TEMP_1272",
messageParameters = Map("table" -> table.identifier.toString))
errorClass = "UNSUPPORTED_SHOW_CREATE_TABLE.ON_TRANSACTIONAL_HIVE_TABLE",
messageParameters = Map("tableName" -> toSQLId(table.identifier.nameParts)))
}

def showCreateTableFailToExecuteUnsupportedConfError(
table: TableIdentifier,
builder: mutable.StringBuilder): Throwable = {
configs: String): Throwable = {
new AnalysisException(
errorClass = "_LEGACY_ERROR_TEMP_1273",
errorClass = "UNSUPPORTED_SHOW_CREATE_TABLE.WITH_UNSUPPORTED_SERDE_CONFIGURATION",
messageParameters = Map(
"table" -> table.identifier,
"configs" -> builder.toString()))
"tableName" -> toSQLId(table.nameParts),
"configs" -> configs))
}

def showCreateTableAsSerdeNotAllowedOnSparkDataSourceTableError(
table: TableIdentifier): Throwable = {
new AnalysisException(
errorClass = "_LEGACY_ERROR_TEMP_1274",
messageParameters = Map("table" -> table.toString))
errorClass = "UNSUPPORTED_SHOW_CREATE_TABLE.ON_DATA_SOURCE_TABLE_WITH_AS_SERDE",
messageParameters = Map("tableName" -> toSQLId(table.nameParts)))
}

def showCreateTableOrViewFailToExecuteUnsupportedFeatureError(
table: CatalogTable,
features: Seq[String]): Throwable = {
unsupportedFeatures: Seq[String]): Throwable = {
new AnalysisException(
errorClass = "_LEGACY_ERROR_TEMP_1275",
errorClass = "UNSUPPORTED_SHOW_CREATE_TABLE.WITH_UNSUPPORTED_FEATURE",
messageParameters = Map(
"table" -> table.identifier.toString,
"features" -> features.map(" - " + _).mkString("\n")))
"tableName" -> toSQLId(table.identifier.nameParts),
"unsupportedFeatures" -> unsupportedFeatures.map(" - " + _).mkString("\n")))
}

def logicalPlanForViewNotAnalyzedError(): Throwable = {
@@ -1183,8 +1183,8 @@ case class ShowCreateTableCommand(
} else {
// For a Hive serde table, we try to convert it to Spark DDL.
if (tableMetadata.unsupportedFeatures.nonEmpty) {
throw QueryCompilationErrors.showCreateTableFailToExecuteUnsupportedFeatureError(
tableMetadata)
throw QueryCompilationErrors.showCreateTableOrViewFailToExecuteUnsupportedFeatureError(
tableMetadata, tableMetadata.unsupportedFeatures)
}

if ("true".equalsIgnoreCase(tableMetadata.properties.getOrElse("transactional", "false"))) {
@@ -1237,7 +1237,8 @@
hiveSerde.outputFormat.foreach { format =>
builder ++= s" OUTPUTFORMAT: $format"
}
throw QueryCompilationErrors.showCreateTableFailToExecuteUnsupportedConfError(table, builder)
throw QueryCompilationErrors.showCreateTableFailToExecuteUnsupportedConfError(
table, builder.toString())
} else {
// TODO: should we keep Hive serde properties?
val newStorage = tableMetadata.storage.copy(properties = Map.empty)
@@ -1325,9 +1326,9 @@ case class ShowCreateTableAsSerdeCommand(
}

private def showCreateHiveTable(metadata: CatalogTable): String = {
def reportUnsupportedError(features: Seq[String]): Unit = {
def reportUnsupportedError(unsupportedFeatures: Seq[String]): Unit = {
throw QueryCompilationErrors.showCreateTableOrViewFailToExecuteUnsupportedFeatureError(
metadata, features)
metadata, unsupportedFeatures)
}

if (metadata.unsupportedFeatures.nonEmpty) {
@@ -162,8 +162,9 @@ trait ShowCreateTableSuiteBase extends command.ShowCreateTableSuiteBase
exception = intercept[AnalysisException] {
getShowCreateDDL(t, true)
},
errorClass = "_LEGACY_ERROR_TEMP_1274",
parameters = Map("table" -> "`spark_catalog`.`ns1`.`tbl`")
errorClass = "UNSUPPORTED_SHOW_CREATE_TABLE.ON_DATA_SOURCE_TABLE_WITH_AS_SERDE",
sqlState = "0A000",
parameters = Map("tableName" -> "`spark_catalog`.`ns1`.`tbl`")
)
}
}
@@ -213,20 +213,22 @@ class HiveSQLViewSuite extends SQLViewSuite with TestHiveSingleton {
exception = intercept[AnalysisException] {
sql("SHOW CREATE TABLE v1")
},
errorClass = "_LEGACY_ERROR_TEMP_1271",
errorClass = "UNSUPPORTED_SHOW_CREATE_TABLE.WITH_UNSUPPORTED_FEATURE",
sqlState = "0A000",
parameters = Map(
"unsupportedFeatures" -> " - partitioned view",
"table" -> s"`$SESSION_CATALOG_NAME`.`default`.`v1`"
"tableName" -> s"`$SESSION_CATALOG_NAME`.`default`.`v1`",
"unsupportedFeatures" -> " - partitioned view"
)
)
checkError(
exception = intercept[AnalysisException] {
sql("SHOW CREATE TABLE v1 AS SERDE")
},
errorClass = "_LEGACY_ERROR_TEMP_1275",
errorClass = "UNSUPPORTED_SHOW_CREATE_TABLE.WITH_UNSUPPORTED_FEATURE",
sqlState = "0A000",
parameters = Map(
"table" -> s"`$SESSION_CATALOG_NAME`.`default`.`v1`",
"features" -> " - partitioned view"
"tableName" -> s"`$SESSION_CATALOG_NAME`.`default`.`v1`",
"unsupportedFeatures" -> " - partitioned view"
)
)
}
@@ -365,9 +365,10 @@ class ShowCreateTableSuite extends v1.ShowCreateTableSuiteBase with CommandSuite
exception = intercept[AnalysisException] {
checkCreateSparkTableAsHive("t1")
},
errorClass = "_LEGACY_ERROR_TEMP_1273",
errorClass = "UNSUPPORTED_SHOW_CREATE_TABLE.WITH_UNSUPPORTED_SERDE_CONFIGURATION",
sqlState = "0A000",
parameters = Map(
"table" -> "t1",
"tableName" -> "`spark_catalog`.`default`.`t1`",
"configs" -> (" SERDE: org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe " +
"INPUTFORMAT: org.apache.hadoop.hive.ql.io.RCFileInputFormat " +
"OUTPUTFORMAT: org.apache.hadoop.hive.ql.io.RCFileOutputFormat"))
@@ -437,8 +438,9 @@
exception = intercept[AnalysisException] {
sql("SHOW CREATE TABLE t1")
},
errorClass = "_LEGACY_ERROR_TEMP_1272",
parameters = Map("table" -> "`spark_catalog`.`default`.`t1`")
errorClass = "UNSUPPORTED_SHOW_CREATE_TABLE.ON_TRANSACTIONAL_HIVE_TABLE",
sqlState = "0A000",
parameters = Map("tableName" -> "`spark_catalog`.`default`.`t1`")
)
}
}
