refactor first batch and test
scottsand-db committed Mar 25, 2024
1 parent 283ac02 commit 3aa661a
Showing 3 changed files with 111 additions and 16 deletions.
32 changes: 32 additions & 0 deletions spark/src/main/resources/error/delta-error-classes.json
@@ -236,6 +236,12 @@
],
"sqlState" : "22003"
},
"DELTA_CANNOT_RESTORE_TIMESTAMP_EARLIER" : {
"message" : [
"Cannot restore table to timestamp (<requestedTimestamp>) as it is before the earliest version available. Please use a timestamp after (<earliestTimestamp>)."
],
"sqlState" : "22003"
},
"DELTA_CANNOT_RESTORE_TIMESTAMP_GREATER" : {
"message" : [
"Cannot restore table to timestamp (<requestedTimestamp>) as it is after the latest version available. Please use a timestamp before (<latestTimestamp>)"
@@ -1677,6 +1683,12 @@
],
"sqlState" : "0A000"
},
"DELTA_OPERATION_ON_VIEW_NOT_ALLOWED" : {
"message" : [
"Operation not allowed: <operation> cannot be performed on a view."
],
"sqlState" : "0AKDC"
},
"DELTA_OVERWRITE_SCHEMA_WITH_DYNAMIC_PARTITION_OVERWRITE" : {
"message" : [
"'overwriteSchema' cannot be used in dynamic partition overwrite mode."
@@ -2681,5 +2693,25 @@
"TBLPROPERTIES('delta.feature.allowColumnDefaults' = 'supported')."
],
"sqlState" : "0AKDE"
},
"_LEGACY_ERROR_TEMP_DELTA_0001" : {
"message" : [
"Cannot use '<name>' as the name of a CHECK constraint."
]
},
"_LEGACY_ERROR_TEMP_DELTA_0002" : {
"message" : [
"Cannot create bloom filter index, invalid parameter value: '<message>'."
]
},
"_LEGACY_ERROR_TEMP_DELTA_0003" : {
"message" : [
"You are trying to convert a table which already has a delta log where the table properties in the catalog don't match the configuration in the delta log.",
"Table properties in catalog:",
"<tableProperties>",
"Delta configuration:",
"<configuration>",
"If you would like to merge the configurations (update existing fields and insert new ones), set the SQL configuration `spark.databricks.delta.convert.metadataCheck.enabled` to false."
]
}
}
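
Each new entry above is a message template: the angle-bracket tokens such as <requestedTimestamp> appear to be filled, in order of appearance, from the messageParameters array passed to DeltaAnalysisException in DeltaErrors.scala below. A minimal standalone Scala sketch of that substitution idea (not the Delta implementation; render and ErrorTemplateSketch are illustrative names):

import scala.util.matching.Regex

object ErrorTemplateSketch {
  // Replace each <placeholder> with the next entry of `parameters`, in order.
  def render(template: String, parameters: Array[String]): String = {
    val placeholder = "<[A-Za-z0-9_]+>".r
    val values = parameters.iterator
    placeholder.replaceAllIn(template, _ =>
      if (values.hasNext) Regex.quoteReplacement(values.next()) else "<missing>")
  }

  def main(args: Array[String]): Unit = {
    // Template taken from DELTA_CANNOT_RESTORE_TIMESTAMP_EARLIER above.
    val template =
      "Cannot restore table to timestamp (<requestedTimestamp>) as it is before the " +
        "earliest version available. Please use a timestamp after (<earliestTimestamp>)."
    // Prints the same text the new DeltaErrorsSuite test case expects.
    println(render(template, Array("2022-02-02 12:12:12", "2022-02-02 12:12:14")))
  }
}
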
35 changes: 19 additions & 16 deletions spark/src/main/scala/org/apache/spark/sql/delta/DeltaErrors.scala
@@ -295,7 +295,10 @@ trait DeltaErrorsBase
}

def invalidConstraintName(name: String): AnalysisException = {
new AnalysisException(s"Cannot use '$name' as the name of a CHECK constraint.")
new DeltaAnalysisException(
errorClass = "_LEGACY_ERROR_TEMP_DELTA_0001",
messageParameters = Array(name)
)
}

def nonexistentConstraint(constraintName: String, tableName: String): AnalysisException = {
@@ -1096,8 +1099,10 @@
}

def bloomFilterInvalidParameterValueException(message: String): Throwable = {
new AnalysisException(
s"Cannot create bloom filter index, invalid parameter value: $message")
new DeltaAnalysisException(
errorClass = "_LEGACY_ERROR_TEMP_DELTA_0002",
messageParameters = Array(message)
)
}

def bloomFilterDropOnNonIndexedColumnException(name: String): Throwable = {
@@ -1221,15 +1226,10 @@
def prettyMap(m: Map[String, String]): String = {
m.map(e => s"${e._1}=${e._2}").mkString("[", ", ", "]")
}
new AnalysisException(
s"""You are trying to convert a table which already has a delta log where the table
|properties in the catalog don't match the configuration in the delta log.
|Table properties in catalog: ${prettyMap(tableProperties)}
|Delta configuration: ${prettyMap{deltaConfiguration}}
|If you would like to merge the configurations (update existing fields and insert new
|ones), set the SQL configuration
|spark.databricks.delta.convert.metadataCheck.enabled to false.
""".stripMargin)
new DeltaAnalysisException(
errorClass = "_LEGACY_ERROR_TEMP_DELTA_0003",
messageParameters = Array(prettyMap(tableProperties), prettyMap(deltaConfiguration))
)
}

def createExternalTableWithoutLogException(
@@ -1399,9 +1399,9 @@
def restoreTimestampBeforeEarliestException(
userTimestamp: String,
earliestTimestamp: String): Throwable = {
new AnalysisException(
s"Cannot restore table to timestamp ($userTimestamp) as it is before the earliest version " +
s"available. Please use a timestamp after ($earliestTimestamp)"
new DeltaAnalysisException(
errorClass = "DELTA_CANNOT_RESTORE_TIMESTAMP_EARLIER",
messageParameters = Array(userTimestamp, earliestTimestamp)
)
}

@@ -1579,7 +1579,10 @@
}

def viewNotSupported(operationName: String): Throwable = {
new AnalysisException(s"Operation $operationName can not be performed on a view")
new DeltaAnalysisException(
errorClass = "DELTA_OPERATION_ON_VIEW_NOT_ALLOWED",
messageParameters = Array(operationName)
)
}

def postCommitHookFailedException(
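
Since DeltaAnalysisException is returned where an AnalysisException is expected (see invalidConstraintName above), callers can now branch on the structured error class rather than on message text. A hedged sketch, assuming Spark's SparkThrowable accessors getErrorClass and getSqlState and an illustrative table name and RESTORE statement:

import org.apache.spark.sql.{AnalysisException, SparkSession}

object RestoreErrorHandlingSketch {
  def restoreOrWarn(spark: SparkSession, timestamp: String): Unit = {
    try {
      // Hypothetical table name; the RESTORE statement is only an example here.
      spark.sql(s"RESTORE TABLE my_table TO TIMESTAMP AS OF '$timestamp'")
    } catch {
      case e: AnalysisException
          if e.getErrorClass == "DELTA_CANNOT_RESTORE_TIMESTAMP_EARLIER" =>
        // SQLSTATE 22003, per the entry added to delta-error-classes.json above.
        println(s"Timestamp too early (SQLSTATE ${e.getSqlState}): ${e.getMessage}")
    }
  }
}
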
@@ -2826,6 +2826,66 @@ trait DeltaErrorsSuiteBase
"been set in commit version 1225.")
)
}
{
val e = intercept[DeltaAnalysisException] {
throw DeltaErrors.invalidConstraintName("foo")
}
checkErrorMessage(
e,
Some("_LEGACY_ERROR_TEMP_DELTA_0001"),
None,
Some("Cannot use 'foo' as the name of a CHECK constraint."))
}
{
val e = intercept[DeltaAnalysisException] {
throw DeltaErrors.bloomFilterInvalidParameterValueException("foo")
}
checkErrorMessage(
e,
Some("_LEGACY_ERROR_TEMP_DELTA_0002"),
None,
Some("Cannot create bloom filter index, invalid parameter value: 'foo'."))
}
{
val e = intercept[DeltaAnalysisException] {
throw DeltaErrors.convertMetastoreMetadataMismatchException(
tableProperties = Map("delta.prop1" -> "foo"),
deltaConfiguration = Map("delta.config1" -> "bar"))
}
checkErrorMessage(
e,
Some("_LEGACY_ERROR_TEMP_DELTA_0003"),
None,
Some(
s"""You are trying to convert a table which already has a delta log where the table properties in the catalog don't match the configuration in the delta log.
|Table properties in catalog:
|[delta.prop1=foo]
|Delta configuration:
|[delta.config1=bar]
|If you would like to merge the configurations (update existing fields and insert new ones), set the SQL configuration `spark.databricks.delta.convert.metadataCheck.enabled` to false.""".stripMargin))
}
{
val e = intercept[DeltaAnalysisException] {
throw DeltaErrors.restoreTimestampBeforeEarliestException("2022-02-02 12:12:12",
"2022-02-02 12:12:14")
}
checkErrorMessage(
e,
Some("DELTA_CANNOT_RESTORE_TIMESTAMP_EARLIER"),
Some("22003"),
Some("Cannot restore table to timestamp (2022-02-02 12:12:12) as it is before the " +
"earliest version available. Please use a timestamp after (2022-02-02 12:12:14)."))
}
{
val e = intercept[DeltaAnalysisException] {
throw DeltaErrors.viewNotSupported("FOO_OP")
}
checkErrorMessage(
e,
Some("DELTA_OPERATION_ON_VIEW_NOT_ALLOWED"),
Some("0AKDC"),
Some("Operation not allowed: FOO_OP cannot be performed on a view."))
}
}
}

