diff --git a/spark/src/main/resources/error/delta-error-classes.json b/spark/src/main/resources/error/delta-error-classes.json
index 05e3f1f405..061e136f18 100644
--- a/spark/src/main/resources/error/delta-error-classes.json
+++ b/spark/src/main/resources/error/delta-error-classes.json
@@ -236,6 +236,12 @@
     ],
     "sqlState" : "22003"
   },
+  "DELTA_CANNOT_RESTORE_TIMESTAMP_EARLIER" : {
+    "message" : [
+      "Cannot restore table to timestamp (<requestedTimestamp>) as it is before the earliest version available. Please use a timestamp after (<earliestTimestamp>)."
+    ],
+    "sqlState" : "22003"
+  },
   "DELTA_CANNOT_RESTORE_TIMESTAMP_GREATER" : {
     "message" : [
       "Cannot restore table to timestamp (<requestedTimestamp>) as it is after the latest version available. Please use a timestamp before (<latestTimestamp>)"
@@ -1677,6 +1683,12 @@
     ],
     "sqlState" : "0A000"
   },
+  "DELTA_OPERATION_ON_VIEW_NOT_ALLOWED" : {
+    "message" : [
+      "Operation not allowed: <operation> cannot be performed on a view."
+    ],
+    "sqlState" : "0AKDC"
+  },
   "DELTA_OVERWRITE_SCHEMA_WITH_DYNAMIC_PARTITION_OVERWRITE" : {
     "message" : [
       "'overwriteSchema' cannot be used in dynamic partition overwrite mode."
@@ -2681,5 +2693,25 @@
       "TBLPROPERTIES('delta.feature.allowColumnDefaults' = 'supported')."
     ],
     "sqlState" : "0AKDE"
+  },
+  "_LEGACY_ERROR_TEMP_DELTA_0001" : {
+    "message" : [
+      "Cannot use '<name>' as the name of a CHECK constraint."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_DELTA_0002" : {
+    "message" : [
+      "Cannot create bloom filter index, invalid parameter value: '<message>'."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_DELTA_0003" : {
+    "message" : [
+      "You are trying to convert a table which already has a delta log where the table properties in the catalog don't match the configuration in the delta log.",
+      "Table properties in catalog:",
+      "<tableProperties>",
+      "Delta configuration:",
+      "<deltaConfiguration>",
+      "If you would like to merge the configurations (update existing fields and insert new ones), set the SQL configuration `spark.databricks.delta.convert.metadataCheck.enabled` to false."
+    ]
   }
 }
diff --git a/spark/src/main/scala/org/apache/spark/sql/delta/DeltaErrors.scala b/spark/src/main/scala/org/apache/spark/sql/delta/DeltaErrors.scala
index ed3515c666..2326cff176 100644
--- a/spark/src/main/scala/org/apache/spark/sql/delta/DeltaErrors.scala
+++ b/spark/src/main/scala/org/apache/spark/sql/delta/DeltaErrors.scala
@@ -295,7 +295,10 @@ trait DeltaErrorsBase
   }
 
   def invalidConstraintName(name: String): AnalysisException = {
-    new AnalysisException(s"Cannot use '$name' as the name of a CHECK constraint.")
+    new DeltaAnalysisException(
+      errorClass = "_LEGACY_ERROR_TEMP_DELTA_0001",
+      messageParameters = Array(name)
+    )
   }
 
   def nonexistentConstraint(constraintName: String, tableName: String): AnalysisException = {
@@ -1096,8 +1099,10 @@ trait DeltaErrorsBase
   }
 
   def bloomFilterInvalidParameterValueException(message: String): Throwable = {
-    new AnalysisException(
-      s"Cannot create bloom filter index, invalid parameter value: $message")
+    new DeltaAnalysisException(
+      errorClass = "_LEGACY_ERROR_TEMP_DELTA_0002",
+      messageParameters = Array(message)
+    )
   }
 
   def bloomFilterDropOnNonIndexedColumnException(name: String): Throwable = {
@@ -1221,15 +1226,10 @@ trait DeltaErrorsBase
     def prettyMap(m: Map[String, String]): String = {
       m.map(e => s"${e._1}=${e._2}").mkString("[", ", ", "]")
     }
-    new AnalysisException(
-      s"""You are trying to convert a table which already has a delta log where the table
-         |properties in the catalog don't match the configuration in the delta log.
-         |Table properties in catalog: ${prettyMap(tableProperties)}
-         |Delta configuration: ${prettyMap{deltaConfiguration}}
-         |If you would like to merge the configurations (update existing fields and insert new
-         |ones), set the SQL configuration
-         |spark.databricks.delta.convert.metadataCheck.enabled to false.
-       """.stripMargin)
+    new DeltaAnalysisException(
+      errorClass = "_LEGACY_ERROR_TEMP_DELTA_0003",
+      messageParameters = Array(prettyMap(tableProperties), prettyMap(deltaConfiguration))
+    )
   }
 
   def createExternalTableWithoutLogException(
@@ -1399,9 +1399,9 @@ trait DeltaErrorsBase
   def restoreTimestampBeforeEarliestException(
       userTimestamp: String,
       earliestTimestamp: String): Throwable = {
-    new AnalysisException(
-      s"Cannot restore table to timestamp ($userTimestamp) as it is before the earliest version " +
-      s"available. Please use a timestamp after ($earliestTimestamp)"
+    new DeltaAnalysisException(
+      errorClass = "DELTA_CANNOT_RESTORE_TIMESTAMP_EARLIER",
+      messageParameters = Array(userTimestamp, earliestTimestamp)
     )
   }
 
@@ -1579,7 +1579,10 @@ trait DeltaErrorsBase
   }
 
   def viewNotSupported(operationName: String): Throwable = {
-    new AnalysisException(s"Operation $operationName can not be performed on a view")
+    new DeltaAnalysisException(
+      errorClass = "DELTA_OPERATION_ON_VIEW_NOT_ALLOWED",
+      messageParameters = Array(operationName)
+    )
   }
 
   def postCommitHookFailedException(
diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaErrorsSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaErrorsSuite.scala
index 1334d3d977..d8c26c493f 100644
--- a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaErrorsSuite.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaErrorsSuite.scala
@@ -2826,6 +2826,66 @@ trait DeltaErrorsSuiteBase
           "been set in commit version 1225.")
       )
     }
+    {
+      val e = intercept[DeltaAnalysisException] {
+        throw DeltaErrors.invalidConstraintName("foo")
+      }
+      checkErrorMessage(
+        e,
+        Some("_LEGACY_ERROR_TEMP_DELTA_0001"),
+        None,
+        Some("Cannot use 'foo' as the name of a CHECK constraint."))
+    }
+    {
+      val e = intercept[DeltaAnalysisException] {
+        throw DeltaErrors.bloomFilterInvalidParameterValueException("foo")
+      }
+      checkErrorMessage(
+        e,
+        Some("_LEGACY_ERROR_TEMP_DELTA_0002"),
+        None,
+        Some("Cannot create bloom filter index, invalid parameter value: 'foo'."))
+    }
+    {
+      val e = intercept[DeltaAnalysisException] {
+        throw DeltaErrors.convertMetastoreMetadataMismatchException(
+          tableProperties = Map("delta.prop1" -> "foo"),
+          deltaConfiguration = Map("delta.config1" -> "bar"))
+      }
+      checkErrorMessage(
+        e,
+        Some("_LEGACY_ERROR_TEMP_DELTA_0003"),
+        None,
+        Some(
+          s"""You are trying to convert a table which already has a delta log where the table properties in the catalog don't match the configuration in the delta log.
+             |Table properties in catalog:
+             |[delta.prop1=foo]
+             |Delta configuration:
+             |[delta.config1=bar]
+             |If you would like to merge the configurations (update existing fields and insert new ones), set the SQL configuration `spark.databricks.delta.convert.metadataCheck.enabled` to false.""".stripMargin))
+    }
+    {
+      val e = intercept[DeltaAnalysisException] {
+        throw DeltaErrors.restoreTimestampBeforeEarliestException("2022-02-02 12:12:12",
+          "2022-02-02 12:12:14")
+      }
+      checkErrorMessage(
+        e,
+        Some("DELTA_CANNOT_RESTORE_TIMESTAMP_EARLIER"),
+        Some("22003"),
+        Some("Cannot restore table to timestamp (2022-02-02 12:12:12) as it is before the " +
+          "earliest version available. Please use a timestamp after (2022-02-02 12:12:14)."))
+    }
+    {
+      val e = intercept[DeltaAnalysisException] {
+        throw DeltaErrors.viewNotSupported("FOO_OP")
+      }
+      checkErrorMessage(
+        e,
+        Some("DELTA_OPERATION_ON_VIEW_NOT_ALLOWED"),
+        Some("0AKDC"),
+        Some("Operation not allowed: FOO_OP cannot be performed on a view."))
+    }
   }
 }
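Note on the substitution mechanics: each `messageParameters` array passed to `DeltaAnalysisException` above is positional, while the templates in `delta-error-classes.json` use named `<placeholder>` tokens; the parameters fill the tokens left to right. The sketch below is a minimal, self-contained illustration of that substitution; the object and method names are invented for the example, and this is not Delta's actual `DeltaThrowableHelper` implementation.

```scala
import scala.util.matching.Regex

// Minimal sketch of positional-parameter substitution into an error-class template.
// Illustration only: these names are invented and this is not Delta's real helper.
object ErrorClassTemplateSketch {
  // Template text copied from the DELTA_CANNOT_RESTORE_TIMESTAMP_EARLIER entry above.
  val template: String =
    "Cannot restore table to timestamp (<requestedTimestamp>) as it is before the " +
      "earliest version available. Please use a timestamp after (<earliestTimestamp>)."

  // Replace each <name> token with the next positional parameter, left to right.
  def format(template: String, parameters: Array[String]): String = {
    val placeholder = "<[a-zA-Z0-9_]+>".r
    val params = parameters.iterator
    placeholder.replaceAllIn(template, _ => Regex.quoteReplacement(params.next()))
  }

  def main(args: Array[String]): Unit = {
    // Prints the same message string the restore-timestamp test asserts on.
    println(format(template, Array("2022-02-02 12:12:12", "2022-02-02 12:12:14")))
  }
}
```

Running `main` prints exactly the message checked by the `DELTA_CANNOT_RESTORE_TIMESTAMP_EARLIER` assertion in the test diff above, which is why the tests can compare against fully rendered strings.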