Skip to content

Commit

Permalink
[SPARK-49791][SQL] Make DelegatingCatalogExtension more extendable
Browse files Browse the repository at this point in the history
### What changes were proposed in this pull request?

This PR updates `DelegatingCatalogExtension` so that it's more extendable
- `initialize` is no longer `final`, so that sub-classes can override it
- `delegate` becomes `protected`, so that sub-classes can access it

In addition, this PR fixes a mistake: `DelegatingCatalogExtension` is merely a convenient default implementation; it is actually the `CatalogExtension` interface that indicates a catalog implementation will delegate requests to the Spark session catalog. apache#47724 should therefore use `CatalogExtension` instead.

### Why are the changes needed?

Unblock the Iceberg extension.

### Does this PR introduce _any_ user-facing change?

no

### How was this patch tested?

existing tests

### Was this patch authored or co-authored using generative AI tooling?

no

Closes apache#48257 from cloud-fan/catalog.

Lead-authored-by: Wenchen Fan <wenchen@databricks.com>
Co-authored-by: Wenchen Fan <cloud0fan@gmail.com>
Signed-off-by: Dongjoon Hyun <dongjoon@apache.org>
  • Loading branch information
2 people authored and dongjoon-hyun committed Sep 26, 2024
1 parent 54e62a1 commit 339dd5b
Show file tree
Hide file tree
Showing 3 changed files with 5 additions and 5 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@
@Evolving
public abstract class DelegatingCatalogExtension implements CatalogExtension {

private CatalogPlugin delegate;
protected CatalogPlugin delegate;

@Override
public final void setDelegateCatalog(CatalogPlugin delegate) {
Expand All @@ -51,7 +51,7 @@ public String name() {
}

@Override
public final void initialize(String name, CaseInsensitiveStringMap options) {}
public void initialize(String name, CaseInsensitiveStringMap options) {}

@Override
public Set<TableCatalogCapability> capabilities() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.catalyst.util.{quoteIfNeeded, toPrettySQL, ResolveDefaultColumns => DefaultCols}
import org.apache.spark.sql.catalyst.util.ResolveDefaultColumns._
import org.apache.spark.sql.connector.catalog.{CatalogManager, CatalogPlugin, CatalogV2Util, DelegatingCatalogExtension, LookupCatalog, SupportsNamespaces, V1Table}
import org.apache.spark.sql.connector.catalog.{CatalogExtension, CatalogManager, CatalogPlugin, CatalogV2Util, LookupCatalog, SupportsNamespaces, V1Table}
import org.apache.spark.sql.connector.expressions.Transform
import org.apache.spark.sql.errors.QueryCompilationErrors
import org.apache.spark.sql.execution.command._
Expand Down Expand Up @@ -706,6 +706,6 @@ class ResolveSessionCatalog(val catalogManager: CatalogManager)
private def supportsV1Command(catalog: CatalogPlugin): Boolean = {
isSessionCatalog(catalog) && (
SQLConf.get.getConf(SQLConf.V2_SESSION_CATALOG_IMPLEMENTATION).isEmpty ||
catalog.isInstanceOf[DelegatingCatalogExtension])
catalog.isInstanceOf[CatalogExtension])
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -429,7 +429,7 @@ final class DataFrameWriterImpl[T] private[sql](ds: Dataset[T]) extends DataFram
val canUseV2 = lookupV2Provider().isDefined || (df.sparkSession.sessionState.conf.getConf(
SQLConf.V2_SESSION_CATALOG_IMPLEMENTATION).isDefined &&
!df.sparkSession.sessionState.catalogManager.catalog(CatalogManager.SESSION_CATALOG_NAME)
.isInstanceOf[DelegatingCatalogExtension])
.isInstanceOf[CatalogExtension])

session.sessionState.sqlParser.parseMultipartIdentifier(tableName) match {
case nameParts @ NonSessionCatalogAndIdentifier(catalog, ident) =>
Expand Down

0 comments on commit 339dd5b

Please sign in to comment.