Skip to content

Commit

Permalink
[SPARK-49873][SQL] Assign proper error class for _LEGACY_ERROR_TEMP_1325
Browse files Browse the repository at this point in the history
### What changes were proposed in this pull request?

This PR proposes to assign proper error class for _LEGACY_ERROR_TEMP_1325

### Why are the changes needed?

To improve the user-facing error message by providing a proper error condition and SQLSTATE.

### Does this PR introduce _any_ user-facing change?

Improve user-facing error message

### How was this patch tested?

Updated the existing unit tests to check the new error condition and message parameters.

### Was this patch authored or co-authored using generative AI tooling?

No

Closes apache#48346 from itholic/legacy_1325.

Authored-by: Haejoon Lee <haejoon.lee@databricks.com>
Signed-off-by: Max Gekk <max.gekk@gmail.com>
  • Loading branch information
itholic authored and MaxGekk committed Oct 9, 2024
1 parent b1ff767 commit 97a5aa6
Show file tree
Hide file tree
Showing 5 changed files with 24 additions and 17 deletions.
5 changes: 0 additions & 5 deletions common/utils/src/main/resources/error/error-conditions.json
Original file line number Diff line number Diff line change
Expand Up @@ -6696,11 +6696,6 @@
"The pivot column <pivotColumn> has more than <maxValues> distinct values, this could indicate an error. If this was intended, set <config> to at least the number of distinct values of the pivot column."
]
},
"_LEGACY_ERROR_TEMP_1325" : {
"message" : [
"Cannot modify the value of a static config: <key>."
]
},
"_LEGACY_ERROR_TEMP_1327" : {
"message" : [
"Command execution is not supported in runner <runner>."
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3388,8 +3388,9 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat

/**
 * Builds the error raised when a user attempts to SET/RESET a static SQL
 * configuration at runtime (static configs are fixed at session startup).
 *
 * @param key the static configuration key the user tried to modify
 * @return an [[AnalysisException]] with error condition `CANNOT_MODIFY_CONFIG`;
 *         the key is quoted via `toSQLConf` and the message links to the docs
 *         through `SPARK_DOC_ROOT`
 */
def cannotModifyValueOfStaticConfigError(key: String): Throwable = {
  new AnalysisException(
    errorClass = "CANNOT_MODIFY_CONFIG",
    messageParameters = Map("key" -> toSQLConf(key), "docroot" -> SPARK_DOC_ROOT)
  )
}

def cannotModifyValueOfSparkConfigError(key: String, docroot: String): Throwable = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -233,8 +233,8 @@ class SQLConfSuite extends QueryTest with SharedSparkSession {
// static sql configs
checkError(
exception = intercept[AnalysisException](sql(s"RESET ${StaticSQLConf.WAREHOUSE_PATH.key}")),
condition = "_LEGACY_ERROR_TEMP_1325",
parameters = Map("key" -> "spark.sql.warehouse.dir"))
condition = "CANNOT_MODIFY_CONFIG",
parameters = Map("key" -> "\"spark.sql.warehouse.dir\"", "docroot" -> SPARK_DOC_ROOT))

}

Expand Down Expand Up @@ -315,10 +315,16 @@ class SQLConfSuite extends QueryTest with SharedSparkSession {
}

test("cannot set/unset static SQL conf") {
  // Static SQL configs cannot be changed after session startup: both SET via SQL
  // and unset via the programmatic conf API must raise CANNOT_MODIFY_CONFIG.
  checkError(
    exception = intercept[AnalysisException](sql(s"SET ${GLOBAL_TEMP_DATABASE.key}=10")),
    condition = "CANNOT_MODIFY_CONFIG",
    parameters = Map("key" -> "\"spark.sql.globalTempDatabase\"", "docroot" -> SPARK_DOC_ROOT)
  )
  checkError(
    exception = intercept[AnalysisException](spark.conf.unset(GLOBAL_TEMP_DATABASE.key)),
    condition = "CANNOT_MODIFY_CONFIG",
    parameters = Map("key" -> "\"spark.sql.globalTempDatabase\"", "docroot" -> SPARK_DOC_ROOT)
  )
}

test("SPARK-36643: Show migration guide when attempting SparkConf") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1062,7 +1062,7 @@ class SingleSessionSuite extends HiveThriftServer2TestBase {
statement.executeQuery("SET spark.sql.hive.thriftServer.singleSession=false")
}.getMessage
assert(e.contains(
"Cannot modify the value of a static config: spark.sql.hive.thriftServer.singleSession"))
"CANNOT_MODIFY_CONFIG"))
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,14 +26,15 @@ import java.util.{Locale, Set}
import com.google.common.io.{Files, FileWriteMode}
import org.apache.hadoop.fs.{FileSystem, Path}

import org.apache.spark.{SparkException, TestUtils}
import org.apache.spark.{SPARK_DOC_ROOT, SparkException, TestUtils}
import org.apache.spark.scheduler.{SparkListener, SparkListenerEvent}
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.EliminateSubqueryAliases
import org.apache.spark.sql.catalyst.catalog.{CatalogTableType, CatalogUtils, HiveTableRelation}
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias}
import org.apache.spark.sql.catalyst.util.TypeUtils.toSQLConf
import org.apache.spark.sql.connector.catalog.CatalogManager.SESSION_CATALOG_NAME
import org.apache.spark.sql.execution.{SparkPlanInfo, TestUncaughtExceptionHandler}
import org.apache.spark.sql.execution.adaptive.{DisableAdaptiveExecutionSuite, EnableAdaptiveExecutionSuite}
Expand Down Expand Up @@ -2461,8 +2462,12 @@ abstract class SQLQuerySuiteBase extends QueryTest with SQLTestUtils with TestHi
"spark.sql.hive.metastore.jars",
"spark.sql.hive.metastore.sharedPrefixes",
"spark.sql.hive.metastore.barrierPrefixes").foreach { key =>
val e = intercept[AnalysisException](sql(s"set $key=abc"))
assert(e.getMessage.contains("Cannot modify the value of a static config"))
checkError(
exception = intercept[AnalysisException](sql(s"set $key=abc")),
condition = "CANNOT_MODIFY_CONFIG",
parameters = Map(
"key" -> toSQLConf(key), "docroot" -> SPARK_DOC_ROOT)
)
}
}

Expand Down

0 comments on commit 97a5aa6

Please sign in to comment.