From 19046347c1534f6e7acdd9f46ac175eda12e1f5a Mon Sep 17 00:00:00 2001
From: panbingkun
Date: Wed, 10 Jul 2024 10:26:43 +0800
Subject: [PATCH] [SPARK-48851][SQL] Attach full name for `SCHEMA_NOT_FOUND`

---
 .../sql/catalyst/analysis/noSuchItemsExceptions.scala      | 7 +++++++
 .../apache/spark/sql/catalyst/catalog/SessionCatalog.scala | 7 +++++--
 .../spark/sql/connector/catalog/InMemoryTableCatalog.scala | 2 +-
 .../sql/execution/datasources/v2/V2SessionCatalog.scala    | 4 +++-
 .../command/AlterNamespaceUnsetPropertiesSuiteBase.scala   | 2 +-
 5 files changed, 17 insertions(+), 5 deletions(-)

diff --git a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/analysis/noSuchItemsExceptions.scala b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/analysis/noSuchItemsExceptions.scala
index ac22d26ccfd18..75b6b123cc029 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/analysis/noSuchItemsExceptions.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/analysis/noSuchItemsExceptions.scala
@@ -52,6 +52,13 @@ class NoSuchDatabaseException private(
       messageParameters = Map("schemaName" -> quoteIdentifier(db)),
       cause = None)
   }
+
+  def this(db: Seq[String]) = {
+    this(
+      errorClass = "SCHEMA_NOT_FOUND",
+      messageParameters = Map("schemaName" -> quoteNameParts(db)),
+      cause = None)
+  }
 }
 
 // any changes to this class should be backward compatible as it may be used by external connectors
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 0e0852d0a550d..edf28bd5e45ca 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -71,7 +71,9 @@ class SessionCatalog(
     functionExpressionBuilder: FunctionExpressionBuilder,
     cacheSize: Int = SQLConf.get.tableRelationCacheSize,
     cacheTTL: Long = SQLConf.get.metadataCacheTTL,
-    defaultDatabase: String = SQLConf.get.defaultDatabase) extends SQLConfHelper with Logging {
+    defaultDatabase: String = SQLConf.get.defaultDatabase,
+    var catalogName: Option[String] = Some(CatalogManager.SESSION_CATALOG_NAME))
+  extends SQLConfHelper with Logging {
   import SessionCatalog._
   import CatalogTypes.TablePartitionSpec
 
@@ -250,7 +252,8 @@ class SessionCatalog(
 
   private def requireDbExists(db: String): Unit = {
     if (!databaseExists(db)) {
-      throw new NoSuchDatabaseException(db)
+      val nameParts = catalogName.map(Seq(_, db)).getOrElse(Seq(db))
+      throw new NoSuchDatabaseException(nameParts)
     }
   }
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryTableCatalog.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryTableCatalog.scala
index 654fa0719cf82..69bc68a9f1611 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryTableCatalog.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/InMemoryTableCatalog.scala
@@ -210,7 +210,7 @@ class InMemoryTableCatalog extends BasicInMemoryTableCatalog with SupportsNamesp
       case _ if namespaceExists(namespace) =>
         util.Collections.emptyMap[String, String]
       case _ =>
-        throw new NoSuchNamespaceException(namespace)
+        throw new NoSuchNamespaceException(name +: namespace)
     }
   }
 
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
index e619c59a7540c..e4db2282b0a8a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
@@ -52,7 +52,9 @@ class V2SessionCatalog(catalog: SessionCatalog)
   override def name: String = CatalogManager.SESSION_CATALOG_NAME
 
   // This class is instantiated by Spark, so `initialize` method will not be called.
-  override def initialize(name: String, options: CaseInsensitiveStringMap): Unit = {}
+  override def initialize(name: String, options: CaseInsensitiveStringMap): Unit = {
+    catalog.catalogName = Some(name)
+  }
 
   override def capabilities(): util.Set[TableCatalogCapability] = {
     Set(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceUnsetPropertiesSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceUnsetPropertiesSuiteBase.scala
index 1d43cc5938487..c00f3f99f41f9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceUnsetPropertiesSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterNamespaceUnsetPropertiesSuiteBase.scala
@@ -57,7 +57,7 @@ trait AlterNamespaceUnsetPropertiesSuiteBase extends QueryTest with DDLCommandTe
     }
     checkError(e,
       errorClass = "SCHEMA_NOT_FOUND",
-      parameters = Map("schemaName" -> s"`$ns`"))
+      parameters = Map("schemaName" -> s"`$catalog`.`$ns`"))
   }
 
   test("basic test") {
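
For context when reviewing, a minimal, self-contained sketch of the user-visible effect of this patch: the SCHEMA_NOT_FOUND error now reports the fully qualified schema name (catalog plus schema) instead of the bare schema name. The helpers below are stand-ins that mirror the assumed quoting behavior of the catalyst utilities referenced in the diff (`quoteIdentifier` backtick-quotes one part, doubling embedded backticks; `quoteNameParts` dot-joins the quoted parts); the catalog and schema names are hypothetical, not from the patch.

    // Sketch only: stand-ins for the catalyst quoting helpers used in the
    // patch, not the actual Spark implementations.
    object SchemaNotFoundMessageSketch {
      // Backtick-quote a single identifier part, escaping embedded backticks.
      private def quoteIdentifier(part: String): String =
        "`" + part.replace("`", "``") + "`"

      // Assumed behavior of quoteNameParts: quote each part, join with '.'.
      private def quoteNameParts(parts: Seq[String]): String =
        parts.map(quoteIdentifier).mkString(".")

      def main(args: Array[String]): Unit = {
        val db = "ns"
        // Before this patch: only the bare schema name reached the error.
        println(quoteIdentifier(db))                       // `ns`
        // After this patch: the owning catalog is prepended when known,
        // matching the updated test expectation s"`$catalog`.`$ns`".
        println(quoteNameParts(Seq("spark_catalog", db)))  // `spark_catalog`.`ns`
      }
    }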