[Spark] Use condition instead of errorClass in checkError() (#3680)

## Description

In this PR, I propose to use the `condition` parameter instead of
`errorClass` in calls to `checkError`, since `errorClass` was renamed
to `condition` in Spark by apache/spark#48027. This PR fixes
compilation issues like:
```
[error]       checkError(
[error]       ^
[error] /home/runner/work/delta/delta/spark/src/test/scala/org/apache/spark/sql/delta/rowtracking/RowTrackingReadWriteSuite.scala:304:7: overloaded method checkError with alternatives:
[error]   (exception: org.apache.spark.SparkThrowable,condition: String,sqlState: Option[String],parameters: Map[String,String],context: RowTrackingReadWriteSuite.this.ExpectedContext)Unit <and>
```
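
The fix at each call site is mechanical: drop or rename the `errorClass =` label. A minimal before/after sketch, based on a call site from this PR (a sketch only; `e` stands for an already-intercepted `SparkThrowable`, and the overload is the one quoted in the compiler error above):

```scala
// Before: compiled against Spark prior to apache/spark#48027, now fails,
// because the parameter `errorClass` no longer exists under that name.
checkError(
  e,
  errorClass = "DELTA_FEATURES_REQUIRE_MANUAL_ENABLEMENT",
  parameters = Map("unsupportedFeatures" -> VariantTypeTableFeature.name))

// After, as done throughout this PR: pass the error condition positionally.
checkError(
  e,
  "DELTA_FEATURES_REQUIRE_MANUAL_ENABLEMENT",
  parameters = Map("unsupportedFeatures" -> VariantTypeTableFeature.name))

// Equivalently, the renamed parameter can be spelled out.
checkError(
  e,
  condition = "DELTA_FEATURES_REQUIRE_MANUAL_ENABLEMENT",
  parameters = Map("unsupportedFeatures" -> VariantTypeTableFeature.name))
```

The positional form keeps the diff small and, since no parameter name is spelled out, is insensitive to the rename, which is why it is used consistently across the touched suites.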

## How was this patch tested?
By compiling locally.

## Does this PR introduce _any_ user-facing changes?
No. This makes changes in tests only.
MaxGekk authored Sep 18, 2024
1 parent d467f52 commit b2339cb
Showing 35 changed files with 280 additions and 284 deletions.
@@ -100,7 +100,7 @@ class DeltaVariantSuite
       // check previously thrown error message
       checkError(
         e,
-        errorClass = "DELTA_FEATURES_REQUIRE_MANUAL_ENABLEMENT",
+        "DELTA_FEATURES_REQUIRE_MANUAL_ENABLEMENT",
         parameters = Map(
           "unsupportedFeatures" -> VariantTypeTableFeature.name,
           "supportedFeatures" -> currentFeatures
@@ -123,13 +123,13 @@ class DeltaVariantSuite
test("VariantType may not be used as a partition column") {
withTable("delta_test") {
checkError(
exception = intercept[AnalysisException] {
intercept[AnalysisException] {
sql(
"""CREATE TABLE delta_test(s STRING, v VARIANT)
|USING delta
|PARTITIONED BY (v)""".stripMargin)
},
errorClass = "INVALID_PARTITION_COLUMN_DATA_TYPE",
"INVALID_PARTITION_COLUMN_DATA_TYPE",
parameters = Map("type" -> "\"VARIANT\"")
)
}
@@ -516,7 +516,7 @@ class DeltaVariantSuite
     }
     checkError(
       insertException,
-      errorClass = "DELTA_NOT_NULL_CONSTRAINT_VIOLATED",
+      "DELTA_NOT_NULL_CONSTRAINT_VIOLATED",
       parameters = Map("columnName" -> "v")
     )

@@ -539,7 +539,7 @@ class DeltaVariantSuite
     }
     checkError(
       insertException,
-      errorClass = "DELTA_VIOLATE_CONSTRAINT_WITH_VALUES",
+      "DELTA_VIOLATE_CONSTRAINT_WITH_VALUES",
       parameters = Map(
         "constraintName" -> "variantgtezero",
         "expression" -> "(variant_get(v, '$', 'INT') >= 0)", "values" -> " - v : -1"
@@ -463,9 +463,9 @@ class DeltaSqlParserSuite extends SparkFunSuite with SQLHelper {
     val parser = new DeltaSqlParser(new SparkSqlParser())
     val sql =
       clusterByStatement(clause, asSelect, "a int, b string", "CLUSTER BY (a) CLUSTER BY (b)")
-    checkError(exception = intercept[ParseException] {
+    checkError(intercept[ParseException] {
       parser.parsePlan(sql)
-    }, errorClass = "DUPLICATE_CLAUSES", parameters = Map("clauseName" -> "CLUSTER BY"))
+    }, "DUPLICATE_CLAUSES", parameters = Map("clauseName" -> "CLUSTER BY"))
   }

   test("CLUSTER BY set clustering column property is ignored - " +
@@ -492,9 +492,9 @@ class DeltaSqlParserSuite extends SparkFunSuite with SQLHelper {
"CLUSTER BY (a) PARTITIONED BY (b)")
val errorMsg = "Clustering and partitioning cannot both be specified. " +
"Please remove PARTITIONED BY if you want to create a Delta table with clustering"
checkError(exception = intercept[ParseException] {
checkError(intercept[ParseException] {
parser.parsePlan(sql)
}, errorClass = "_LEGACY_ERROR_TEMP_0035", parameters = Map("message" -> errorMsg))
}, "_LEGACY_ERROR_TEMP_0035", parameters = Map("message" -> errorMsg))
}

test(s"CLUSTER BY with bucketing - $clause TABLE asSelect = $asSelect") {
@@ -508,9 +508,9 @@ class DeltaSqlParserSuite extends SparkFunSuite with SQLHelper {
val errorMsg = "Clustering and bucketing cannot both be specified. " +
"Please remove CLUSTERED BY INTO BUCKETS if you " +
"want to create a Delta table with clustering"
checkError(exception = intercept[ParseException] {
checkError(intercept[ParseException] {
parser.parsePlan(sql)
}, errorClass = "_LEGACY_ERROR_TEMP_0035", parameters = Map("message" -> errorMsg))
}, "_LEGACY_ERROR_TEMP_0035", parameters = Map("message" -> errorMsg))
}
}
}
@@ -492,10 +492,7 @@ class DeltaTableBuilderSuite
         .execute()
     }

-    checkError(
-      exception = e,
-      errorClass = "DELTA_CLUSTER_BY_WITH_PARTITIONED_BY"
-    )
+    checkError(e, "DELTA_CLUSTER_BY_WITH_PARTITIONED_BY")
   }
 }
 }
@@ -924,17 +924,17 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
.withColumn("a", map('v1, 'v2))
withDeltaTable(df) { tableName =>
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
sql(s"ALTER TABLE $tableName CHANGE COLUMN a.key COMMENT 'a comment'")
},
errorClass = "DELTA_UNSUPPORTED_COMMENT_MAP_ARRAY",
"DELTA_UNSUPPORTED_COMMENT_MAP_ARRAY",
parameters = Map("fieldPath" -> "a.key")
)
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
sql(s"ALTER TABLE $tableName CHANGE COLUMN a.value COMMENT 'a comment'")
},
errorClass = "DELTA_UNSUPPORTED_COMMENT_MAP_ARRAY",
"DELTA_UNSUPPORTED_COMMENT_MAP_ARRAY",
parameters = Map("fieldPath" -> "a.value")
)
}
@@ -945,10 +945,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
.withColumn("a", array('v1))
withDeltaTable(df) { tableName =>
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
sql(s"ALTER TABLE $tableName CHANGE COLUMN a.element COMMENT 'a comment'")
},
errorClass = "DELTA_UNSUPPORTED_COMMENT_MAP_ARRAY",
"DELTA_UNSUPPORTED_COMMENT_MAP_ARRAY",
parameters = Map("fieldPath" -> "a.element")
)
}
@@ -959,20 +959,20 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
.withColumn("a", map('v1, 'v2))
withDeltaTable(df) { tableName =>
checkError(
exception = intercept[AnalysisException] {
intercept[AnalysisException] {
sql(s"ALTER TABLE $tableName RENAME COLUMN a.key TO key2")
},
errorClass = "INVALID_FIELD_NAME",
"INVALID_FIELD_NAME",
parameters = Map(
"fieldName" -> "`a`.`key2`",
"path" -> "`a`"
)
)
checkError(
exception = intercept[AnalysisException] {
intercept[AnalysisException] {
sql(s"ALTER TABLE $tableName RENAME COLUMN a.value TO value2")
},
errorClass = "INVALID_FIELD_NAME",
"INVALID_FIELD_NAME",
parameters = Map(
"fieldName" -> "`a`.`value2`",
"path" -> "`a`"
@@ -986,10 +986,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
.withColumn("a", array('v1))
withDeltaTable(df) { tableName =>
checkError(
exception = intercept[AnalysisException] {
intercept[AnalysisException] {
sql(s"ALTER TABLE $tableName RENAME COLUMN a.element TO element2")
},
errorClass = "INVALID_FIELD_NAME",
"INVALID_FIELD_NAME",
parameters = Map(
"fieldName" -> "`a`.`element2`",
"path" -> "`a`"
@@ -1008,10 +1008,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
ddlTest("CHANGE COLUMN - incompatible") {
withDeltaTable(Seq((1, "a"), (2, "b")).toDF("v1", "v2")) { tableName =>
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
sql(s"ALTER TABLE $tableName CHANGE COLUMN v1 v1 long")
},
errorClass = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
"DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
parameters = Map(
"fieldPath" -> "v1",
"oldField" -> "INT",
@@ -1026,10 +1026,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
.withColumn("struct", struct("v1", "v2"))
withDeltaTable(df) { tableName =>
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
sql(s"ALTER TABLE $tableName CHANGE COLUMN struct.v1 v1 long")
},
errorClass = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
"DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
parameters = Map(
"fieldPath" -> "struct.v1",
"oldField" -> "INT",
@@ -1044,10 +1044,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
.withColumn("a", map('v1, 'v2))
withDeltaTable(df) { tableName =>
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
sql(s"ALTER TABLE $tableName CHANGE COLUMN a.key key long")
},
errorClass = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
"DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
parameters = Map(
"fieldPath" -> "a.key",
"oldField" -> "INT NOT NULL",
@@ -1062,10 +1062,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
.withColumn("a", map('v1, 'v2))
withDeltaTable(df) { tableName =>
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
sql(s"ALTER TABLE $tableName CHANGE COLUMN a.value value long")
},
errorClass = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
"DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
parameters = Map(
"fieldPath" -> "a.value",
"oldField" -> "INT",
@@ -1080,10 +1080,10 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
.withColumn("a", array('v1))
withDeltaTable(df) { tableName =>
checkError(
exception = intercept[DeltaAnalysisException] {
intercept[DeltaAnalysisException] {
sql(s"ALTER TABLE $tableName CHANGE COLUMN a.element element long")
},
errorClass = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
"DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
parameters = Map(
"fieldPath" -> "a.element",
"oldField" -> "INT",
@@ -1383,8 +1383,8 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {
       // Changing the nullability of map/array fields is not allowed.
       var statement = s"ALTER TABLE $tableName CHANGE COLUMN m.key DROP NOT NULL"
       checkError(
-        exception = intercept[AnalysisException] { sql(statement) },
-        errorClass = "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
+        intercept[AnalysisException] { sql(statement) },
+        "DELTA_UNSUPPORTED_ALTER_TABLE_CHANGE_COL_OP",
         parameters = Map(
           "fieldPath" -> "m.key",
           "oldField" -> "INT NOT NULL",
@@ -1394,8 +1394,8 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {

statement = s"ALTER TABLE $tableName CHANGE COLUMN m.value SET NOT NULL"
checkError(
exception = intercept[AnalysisException] { sql(statement) },
errorClass = "_LEGACY_ERROR_TEMP_2330",
intercept[AnalysisException] { sql(statement) },
"_LEGACY_ERROR_TEMP_2330",
parameters = Map(
"fieldName" -> "m.value"
),
@@ -1404,8 +1404,8 @@ trait DeltaAlterTableTests extends DeltaAlterTableTestBase {

statement = s"ALTER TABLE $tableName CHANGE COLUMN a.element SET NOT NULL"
checkError(
exception = intercept[AnalysisException] { sql(statement) },
errorClass = "_LEGACY_ERROR_TEMP_2330",
intercept[AnalysisException] { sql(statement) },
"_LEGACY_ERROR_TEMP_2330",
parameters = Map(
"fieldName" -> "a.element"
),
@@ -289,10 +289,10 @@ class DeltaCDCSQLSuite extends DeltaCDCSuiteBase with DeltaColumnMappingTestUtil
     withTable(tbl) {
       spark.range(10).write.format("delta").saveAsTable(tbl)
       checkError(
-        exception = intercept[AnalysisException] {
+        intercept[AnalysisException] {
           sql(s"SELECT * FROM table_changes('$tbl', 0, id)")
         },
-        errorClass = "UNRESOLVED_COLUMN.WITHOUT_SUGGESTION",
+        "UNRESOLVED_COLUMN.WITHOUT_SUGGESTION",
         parameters = Map("objectName" -> "`id`"),
         queryContext = Array(ExpectedContext(
           fragment = "id",
@@ -1944,12 +1944,12 @@ class DeltaColumnMappingSuite extends QueryTest
           |TBLPROPERTIES('${DeltaConfigs.COLUMN_MAPPING_MODE.key}'='none')
           |""".stripMargin)
       }
-      val errorClass = "DELTA_INVALID_CHARACTERS_IN_COLUMN_NAMES"
+      val condition = "DELTA_INVALID_CHARACTERS_IN_COLUMN_NAMES"
       checkError(
-        exception = e,
-        errorClass = errorClass,
+        e,
+        condition,
         parameters = DeltaThrowableHelper
-          .getParameterNames(errorClass, errorSubClass = null)
+          .getParameterNames(condition, errorSubClass = null)
           .zip(invalidColumns).toMap
       )
     }
@@ -677,8 +677,8 @@ class DeltaDataFrameWriterV2Suite
     def verifyNotImplicitCasting(f: => Unit): Unit = {
       val e = intercept[DeltaAnalysisException](f)
       checkError(
-        exception = e.getCause.asInstanceOf[DeltaAnalysisException],
-        errorClass = "DELTA_MERGE_INCOMPATIBLE_DATATYPE",
+        e.getCause.asInstanceOf[DeltaAnalysisException],
+        "DELTA_MERGE_INCOMPATIBLE_DATATYPE",
         parameters = Map("currentDataType" -> "LongType", "updateDataType" -> "IntegerType"))
     }
     verifyNotImplicitCasting {
@@ -450,10 +450,10 @@ class DeltaDropColumnSuite extends QueryTest
       field <- Seq("m.key", "m.value", "a.element")
     }
     checkError(
-      exception = intercept[AnalysisException] {
+      intercept[AnalysisException] {
         sql(s"ALTER TABLE delta_test DROP COLUMN $field")
       },
-      errorClass = "DELTA_UNSUPPORTED_DROP_NESTED_COLUMN_FROM_NON_STRUCT_TYPE",
+      "DELTA_UNSUPPORTED_DROP_NESTED_COLUMN_FROM_NON_STRUCT_TYPE",
      parameters = Map(
        "struct" -> "IntegerType"
      )
