From 3350aa808316408aec382bfdc93636da546ff843 Mon Sep 17 00:00:00 2001 From: Assaf Bern Date: Wed, 30 Nov 2022 14:08:59 +0200 Subject: [PATCH] Add a builder for HiveTableHandle --- .../io/trino/plugin/hive/HiveMetadata.java | 71 ++-- .../plugin/hive/HivePartitionManager.java | 28 +- .../io/trino/plugin/hive/HiveTableHandle.java | 356 ++++++++++-------- .../hive/PartitionsSystemTableProvider.java | 16 +- .../trino/plugin/hive/AbstractTestHive.java | 9 +- .../trino/plugin/hive/TestHivePageSink.java | 5 +- .../plugin/hive/TestHiveTableHandle.java | 9 +- .../TestNodeLocalDynamicSplitPruning.java | 15 +- .../hive/benchmark/AbstractFileFormat.java | 9 +- .../TestConnectorPushdownRulesWithHive.java | 44 ++- ...stHiveProjectionPushdownIntoTableScan.java | 10 +- 11 files changed, 320 insertions(+), 252 deletions(-) diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java index f041f9ba2dce..b6eeb44d7226 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java @@ -482,13 +482,14 @@ public HiveTableHandle getTableHandle(ConnectorSession session, SchemaTableName verifyOnline(tableName, Optional.empty(), getProtectMode(table), table.getParameters()); - return new HiveTableHandle( - tableName.getSchemaName(), - tableName.getTableName(), - table.getParameters(), - getPartitionKeyColumnHandles(table, typeManager), - getRegularColumnHandles(table, typeManager, getTimestampPrecision(session)), - getHiveBucketHandle(session, table, typeManager)); + return HiveTableHandle.builder() + .withSchemaName(tableName.getSchemaName()) + .withTableName(tableName.getTableName()) + .withTableParameters(Optional.of(table.getParameters())) + .withPartitionColumns(getPartitionKeyColumnHandles(table, typeManager)) + .withDataColumns(getRegularColumnHandles(table, typeManager, getTimestampPrecision(session))) + .withBucketHandle(getHiveBucketHandle(session, table, typeManager)) + .build(); } @Override @@ -514,7 +515,9 @@ public ConnectorAnalyzeMetadata getStatisticsCollectionMetadata(ConnectorSession } } - handle = handle.withAnalyzePartitionValues(list); + handle = HiveTableHandle.buildFrom(handle) + .withAnalyzePartitionValues(Optional.of(list)) + .build(); HivePartitionResult partitions = partitionManager.getPartitions(handle, list); handle = partitionManager.applyPartitionResult(handle, partitions, alwaysTrue()); } @@ -1886,7 +1889,9 @@ public ConnectorTableHandle beginUpdate(ConnectorSession session, ConnectorTable HiveUpdateProcessor updateProcessor = new HiveUpdateProcessor(allDataColumns, hiveUpdatedColumns); AcidTransaction transaction = metastore.beginUpdate(session, table, updateProcessor); - HiveTableHandle updateHandle = hiveTableHandle.withTransaction(transaction); + HiveTableHandle updateHandle = HiveTableHandle.buildFrom(hiveTableHandle) + .withTransaction(transaction) + .build(); WriteInfo writeInfo = locationService.getQueryWriteInfo(locationHandle); metastore.declareIntentionToWrite(session, writeInfo.getWriteMode(), writeInfo.getWritePath(), tableName); @@ -1947,7 +1952,11 @@ public ConnectorMergeTableHandle beginMerge(ConnectorSession session, ConnectorT } HiveInsertTableHandle insertHandle = beginInsertOrMerge(session, tableHandle, retryMode, "Merging into", true); - return new HiveMergeTableHandle(hiveTableHandle.withTransaction(insertHandle.getTransaction()), insertHandle); + return new 
HiveMergeTableHandle( + HiveTableHandle.buildFrom(hiveTableHandle) + .withTransaction(insertHandle.getTransaction()) + .build(), + insertHandle); } @Override @@ -2420,9 +2429,10 @@ private BeginTableExecuteResult( hiveExecuteHandle .withWriteDeclarationId(writeDeclarationId), - hiveSourceTableHandle + HiveTableHandle.buildFrom(hiveSourceTableHandle) .withMaxScannedFileSize(hiveExecuteHandle.getMaxScannedFileSize()) - .withRecordScannedFiles(true)); + .withRecordScannedFiles(true) + .build()); } @Override @@ -2770,7 +2780,9 @@ public ConnectorTableHandle beginDelete(ConnectorSession session, ConnectorTable WriteInfo writeInfo = locationService.getQueryWriteInfo(locationHandle); metastore.declareIntentionToWrite(session, writeInfo.getWriteMode(), writeInfo.getWritePath(), handle.getSchemaTableName()); - return handle.withTransaction(transaction); + return HiveTableHandle.buildFrom(handle) + .withTransaction(transaction) + .build(); } @Override @@ -3042,7 +3054,9 @@ public Optional> applyProjecti ((HiveColumnHandle) assignment.getValue()).getType())) .collect(toImmutableList()); return Optional.of(new ProjectionApplicationResult<>( - hiveTableHandle.withProjectedColumns(projectedColumns), + HiveTableHandle.buildFrom(hiveTableHandle) + .withProjectedColumns(projectedColumns) + .build(), projections, assignmentsList, false)); @@ -3089,7 +3103,9 @@ public Optional> applyProjecti List outputAssignments = ImmutableList.copyOf(newAssignments.values()); return Optional.of(new ProjectionApplicationResult<>( - hiveTableHandle.withProjectedColumns(projectedColumnsBuilder.build()), + HiveTableHandle.buildFrom(hiveTableHandle) + .withProjectedColumns(projectedColumnsBuilder.build()) + .build(), newProjections, outputAssignments, false)); @@ -3213,29 +3229,16 @@ public ConnectorTableHandle makeCompatiblePartitioning(ConnectorSession session, largerBucketCount % smallerBucketCount == 0 && Integer.bitCount(largerBucketCount / smallerBucketCount) == 1, "The requested partitioning is not a valid alternative for the table layout"); - return new HiveTableHandle( - hiveTable.getSchemaName(), - hiveTable.getTableName(), - hiveTable.getTableParameters(), - hiveTable.getPartitionColumns(), - hiveTable.getDataColumns(), - hiveTable.getPartitionNames(), - hiveTable.getPartitions(), - hiveTable.getCompactEffectivePredicate(), - hiveTable.getEnforcedConstraint(), - Optional.of(new HiveBucketHandle( + return HiveTableHandle.buildFrom(hiveTable) + .withBucketHandle(Optional.of(new HiveBucketHandle( bucketHandle.getColumns(), bucketHandle.getBucketingVersion(), bucketHandle.getTableBucketCount(), hivePartitioningHandle.getBucketCount(), - bucketHandle.getSortedBy())), - hiveTable.getBucketFilter(), - hiveTable.getAnalyzePartitionValues(), - ImmutableSet.of(), - ImmutableSet.of(), // Projected columns is used only during optimization phase of planning - hiveTable.getTransaction(), - hiveTable.isRecordScannedFiles(), - hiveTable.getMaxScannedFileSize()); + bucketHandle.getSortedBy()))) + .withConstraintColumns(ImmutableSet.of()) + .withProjectedColumns(ImmutableSet.of()) // Projected columns is used only during optimization phase of planning + .build(); } @VisibleForTesting diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePartitionManager.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePartitionManager.java index 1a285a3dd26b..fc4ca58df8c3 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePartitionManager.java +++ 
b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePartitionManager.java @@ -168,24 +168,16 @@ public HiveTableHandle applyPartitionResult(HiveTableHandle handle, HivePartitio List partitionColumns = partitions.getPartitionColumns(); enforcedConstraint = partitions.getEffectivePredicate().filter((column, domain) -> partitionColumns.contains(column)); } - return new HiveTableHandle( - handle.getSchemaName(), - handle.getTableName(), - handle.getTableParameters(), - ImmutableList.copyOf(partitions.getPartitionColumns()), - handle.getDataColumns(), - partitionNames, - partitionList, - partitions.getCompactEffectivePredicate(), - enforcedConstraint, - partitions.getBucketHandle(), - partitions.getBucketFilter(), - handle.getAnalyzePartitionValues(), - Sets.union(handle.getConstraintColumns(), constraint.getPredicateColumns().orElseGet(ImmutableSet::of)), - handle.getProjectedColumns(), - handle.getTransaction(), - handle.isRecordScannedFiles(), - handle.getMaxScannedFileSize()); + return HiveTableHandle.buildFrom(handle) + .withPartitionColumns(ImmutableList.copyOf(partitions.getPartitionColumns())) + .withPartitionNames(partitionNames) + .withPartitions(partitionList) + .withCompactEffectivePredicate(partitions.getCompactEffectivePredicate()) + .withEnforcedConstraint(enforcedConstraint) + .withBucketHandle(partitions.getBucketHandle()) + .withBucketFilter(partitions.getBucketFilter()) + .withConstraintColumns(Sets.union(handle.getConstraintColumns(), constraint.getPredicateColumns().orElseGet(ImmutableSet::of))) + .build(); } public Iterator getPartitions(SemiTransactionalHiveMetastore metastore, HiveTableHandle table) diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableHandle.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableHandle.java index 416a315bdaed..13f95e9fb97f 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableHandle.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableHandle.java @@ -60,7 +60,7 @@ public class HiveTableHandle private final Optional maxScannedFileSize; @JsonCreator - public HiveTableHandle( + public static HiveTableHandle fromJsonForDeserializationOnly( @JsonProperty("schemaName") String schemaName, @JsonProperty("tableName") String tableName, @JsonProperty("partitionColumns") List partitionColumns, @@ -72,55 +72,21 @@ public HiveTableHandle( @JsonProperty("analyzePartitionValues") Optional>> analyzePartitionValues, @JsonProperty("transaction") AcidTransaction transaction) { - this( - schemaName, - tableName, - Optional.empty(), - partitionColumns, - dataColumns, - Optional.empty(), - Optional.empty(), - compactEffectivePredicate, - enforcedConstraint, - bucketHandle, - bucketFilter, - analyzePartitionValues, - ImmutableSet.of(), - ImmutableSet.of(), - transaction, - false, - Optional.empty()); - } - - public HiveTableHandle( - String schemaName, - String tableName, - Map tableParameters, - List partitionColumns, - List dataColumns, - Optional bucketHandle) - { - this( - schemaName, - tableName, - Optional.of(tableParameters), - partitionColumns, - dataColumns, - Optional.empty(), - Optional.empty(), - TupleDomain.all(), - TupleDomain.all(), - bucketHandle, - Optional.empty(), - Optional.empty(), - ImmutableSet.of(), - ImmutableSet.of(), - NO_ACID_TRANSACTION, - false, - Optional.empty()); - } - - public HiveTableHandle( + return builder() + .withSchemaName(schemaName) + .withTableName(tableName) + .withPartitionColumns(partitionColumns) + .withDataColumns(dataColumns) + 
.withCompactEffectivePredicate(compactEffectivePredicate) + .withEnforcedConstraint(enforcedConstraint) + .withBucketHandle(bucketHandle) + .withBucketFilter(bucketFilter) + .withAnalyzePartitionValues(analyzePartitionValues) + .withTransaction(transaction) + .build(); + } + + private HiveTableHandle( String schemaName, String tableName, Optional> tableParameters, @@ -159,116 +125,6 @@ public HiveTableHandle( this.maxScannedFileSize = requireNonNull(maxSplitFileSize, "maxSplitFileSize is null"); } - public HiveTableHandle withAnalyzePartitionValues(List> analyzePartitionValues) - { - return new HiveTableHandle( - schemaName, - tableName, - tableParameters, - partitionColumns, - dataColumns, - partitionNames, - partitions, - compactEffectivePredicate, - enforcedConstraint, - bucketHandle, - bucketFilter, - Optional.of(analyzePartitionValues), - constraintColumns, - projectedColumns, - transaction, - recordScannedFiles, - maxScannedFileSize); - } - - public HiveTableHandle withTransaction(AcidTransaction transaction) - { - return new HiveTableHandle( - schemaName, - tableName, - tableParameters, - partitionColumns, - dataColumns, - partitionNames, - partitions, - compactEffectivePredicate, - enforcedConstraint, - bucketHandle, - bucketFilter, - analyzePartitionValues, - constraintColumns, - projectedColumns, - transaction, - recordScannedFiles, - maxScannedFileSize); - } - - public HiveTableHandle withProjectedColumns(Set projectedColumns) - { - return new HiveTableHandle( - schemaName, - tableName, - tableParameters, - partitionColumns, - dataColumns, - partitionNames, - partitions, - compactEffectivePredicate, - enforcedConstraint, - bucketHandle, - bucketFilter, - analyzePartitionValues, - constraintColumns, - projectedColumns, - transaction, - recordScannedFiles, - maxScannedFileSize); - } - - public HiveTableHandle withRecordScannedFiles(boolean recordScannedFiles) - { - return new HiveTableHandle( - schemaName, - tableName, - tableParameters, - partitionColumns, - dataColumns, - partitionNames, - partitions, - compactEffectivePredicate, - enforcedConstraint, - bucketHandle, - bucketFilter, - analyzePartitionValues, - constraintColumns, - projectedColumns, - transaction, - recordScannedFiles, - maxScannedFileSize); - } - - public HiveTableHandle withMaxScannedFileSize(Optional maxScannedFileSize) - { - return new HiveTableHandle( - schemaName, - tableName, - tableParameters, - partitionColumns, - dataColumns, - partitionNames, - partitions, - compactEffectivePredicate, - enforcedConstraint, - bucketHandle, - bucketFilter, - analyzePartitionValues, - constraintColumns, - projectedColumns, - transaction, - recordScannedFiles, - maxScannedFileSize); - } - @JsonProperty public String getSchemaName() { @@ -487,4 +343,184 @@ public String toString() }); return builder.toString(); } + + public static Builder builder() + { + return new Builder(); + } + + public static Builder buildFrom(HiveTableHandle table) + { + return new Builder(table); + } + + public static class Builder + { + private String schemaName; + private String tableName; + private Optional> tableParameters = Optional.empty(); + private List partitionColumns = ImmutableList.of(); + private List dataColumns = ImmutableList.of(); + private Optional> partitionNames = Optional.empty(); + private Optional> partitions = Optional.empty(); + private TupleDomain compactEffectivePredicate = TupleDomain.all(); + private TupleDomain enforcedConstraint = TupleDomain.all(); + private Optional bucketHandle = Optional.empty(); + private Optional 
bucketFilter = Optional.empty(); + private Optional>> analyzePartitionValues = Optional.empty(); + private Set constraintColumns = ImmutableSet.of(); + private Set projectedColumns = ImmutableSet.of(); + private AcidTransaction transaction = NO_ACID_TRANSACTION; + private boolean recordScannedFiles; + private Optional maxScannedFileSize = Optional.empty(); + + private Builder() + { + } + + private Builder(HiveTableHandle table) + { + this.schemaName = table.schemaName; + this.tableName = table.tableName; + this.tableParameters = table.tableParameters; + this.partitionColumns = table.partitionColumns; + this.dataColumns = table.dataColumns; + this.partitionNames = table.partitionNames; + this.partitions = table.partitions; + this.compactEffectivePredicate = table.compactEffectivePredicate; + this.enforcedConstraint = table.enforcedConstraint; + this.bucketHandle = table.bucketHandle; + this.bucketFilter = table.bucketFilter; + this.analyzePartitionValues = table.analyzePartitionValues; + this.constraintColumns = table.constraintColumns; + this.projectedColumns = table.projectedColumns; + this.transaction = table.transaction; + this.recordScannedFiles = table.recordScannedFiles; + this.maxScannedFileSize = table.maxScannedFileSize; + } + + public Builder withSchemaName(String schemaName) + { + this.schemaName = schemaName; + return this; + } + + public Builder withTableName(String tableName) + { + this.tableName = tableName; + return this; + } + + public Builder withTableParameters(Optional> tableParameters) + { + this.tableParameters = tableParameters; + return this; + } + + public Builder withPartitionColumns(List partitionColumns) + { + this.partitionColumns = partitionColumns; + return this; + } + + public Builder withDataColumns(List dataColumns) + { + this.dataColumns = dataColumns; + return this; + } + + public Builder withPartitionNames(Optional> partitionNames) + { + this.partitionNames = partitionNames; + return this; + } + + public Builder withPartitions(Optional> partitions) + { + this.partitions = partitions; + return this; + } + + public Builder withCompactEffectivePredicate(TupleDomain compactEffectivePredicate) + { + this.compactEffectivePredicate = compactEffectivePredicate; + return this; + } + + public Builder withEnforcedConstraint(TupleDomain enforcedConstraint) + { + this.enforcedConstraint = enforcedConstraint; + return this; + } + + public Builder withBucketHandle(Optional bucketHandle) + { + this.bucketHandle = bucketHandle; + return this; + } + + public Builder withBucketFilter(Optional bucketFilter) + { + this.bucketFilter = bucketFilter; + return this; + } + + public Builder withAnalyzePartitionValues(Optional>> analyzePartitionValues) + { + this.analyzePartitionValues = analyzePartitionValues; + return this; + } + + public Builder withConstraintColumns(Set constraintColumns) + { + this.constraintColumns = constraintColumns; + return this; + } + + public Builder withProjectedColumns(Set projectedColumns) + { + this.projectedColumns = projectedColumns; + return this; + } + + public Builder withTransaction(AcidTransaction transaction) + { + this.transaction = transaction; + return this; + } + + public Builder withRecordScannedFiles(boolean recordScannedFiles) + { + this.recordScannedFiles = recordScannedFiles; + return this; + } + + public Builder withMaxScannedFileSize(Optional maxScannedFileSize) + { + this.maxScannedFileSize = maxScannedFileSize; + return this; + } + + public HiveTableHandle build() + { + return new HiveTableHandle( + schemaName, + tableName, + 
tableParameters, + partitionColumns, + dataColumns, + partitionNames, + partitions, + compactEffectivePredicate, + enforcedConstraint, + bucketHandle, + bucketFilter, + analyzePartitionValues, + constraintColumns, + projectedColumns, + transaction, + recordScannedFiles, + maxScannedFileSize); + } + } } diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/PartitionsSystemTableProvider.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/PartitionsSystemTableProvider.java index 24c8a45d06ec..548efc10153d 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/PartitionsSystemTableProvider.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/PartitionsSystemTableProvider.java @@ -86,13 +86,15 @@ public Optional getSystemTable(HiveMetadata metadata, ConnectorSess return Optional.empty(); } verifyOnline(sourceTableName, Optional.empty(), getProtectMode(sourceTable), sourceTable.getParameters()); - HiveTableHandle sourceTableHandle = new HiveTableHandle( - sourceTableName.getSchemaName(), - sourceTableName.getTableName(), - sourceTable.getParameters(), - getPartitionKeyColumnHandles(sourceTable, typeManager), - getRegularColumnHandles(sourceTable, typeManager, getTimestampPrecision(session)), - getHiveBucketHandle(session, sourceTable, typeManager)); + + HiveTableHandle sourceTableHandle = HiveTableHandle.builder() + .withSchemaName(sourceTableName.getSchemaName()) + .withTableName(sourceTableName.getTableName()) + .withTableParameters(Optional.of(sourceTable.getParameters())) + .withPartitionColumns(getPartitionKeyColumnHandles(sourceTable, typeManager)) + .withDataColumns(getRegularColumnHandles(sourceTable, typeManager, getTimestampPrecision(session))) + .withBucketHandle(getHiveBucketHandle(session, sourceTable, typeManager)) + .build(); List partitionColumns = sourceTableHandle.getPartitionColumns(); if (partitionColumns.isEmpty()) { diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java index e7339589f2a5..3b1f887e2c53 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java @@ -710,7 +710,10 @@ protected void setupHive(String databaseName) tablePartitionSchemaChangeNonCanonical = new SchemaTableName(database, "trino_test_partition_schema_change_non_canonical"); tableBucketEvolution = new SchemaTableName(database, "trino_test_bucket_evolution"); - invalidTableHandle = new HiveTableHandle(database, INVALID_TABLE, ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty()); + invalidTableHandle = HiveTableHandle.builder() + .withSchemaName(database) + .withTableName(INVALID_TABLE) + .build(); dsColumn = createBaseColumn("ds", -1, HIVE_STRING, VARCHAR, PARTITION_KEY, Optional.empty()); fileFormatColumn = createBaseColumn("file_format", -1, HIVE_STRING, VARCHAR, PARTITION_KEY, Optional.empty()); @@ -3716,7 +3719,9 @@ public void testApplyProjection() // Extra columns handles in HiveTableHandle should get pruned projectionResult = metadata.applyProjection( session, - ((HiveTableHandle) tableHandle).withProjectedColumns(ImmutableSet.copyOf(columnHandles)), + HiveTableHandle.buildFrom((HiveTableHandle) tableHandle) + .withProjectedColumns(ImmutableSet.copyOf(columnHandles)) + .build(), inputProjections, inputAssignments); assertProjectionResult(projectionResult, false, inputProjections, expectedAssignments); diff --git 
a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHivePageSink.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHivePageSink.java index e420894d7db8..d5e2e3a13b0b 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHivePageSink.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHivePageSink.java @@ -265,7 +265,10 @@ private static ConnectorPageSource createPageSource(HiveTransactionHandle transa Optional.empty(), 0, SplitWeight.standard()); - ConnectorTableHandle table = new HiveTableHandle(SCHEMA_NAME, TABLE_NAME, ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty()); + ConnectorTableHandle table = HiveTableHandle.builder() + .withSchemaName(SCHEMA_NAME) + .withTableName(TABLE_NAME) + .build(); HivePageSourceProvider provider = new HivePageSourceProvider( TESTING_TYPE_MANAGER, HDFS_ENVIRONMENT, diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveTableHandle.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveTableHandle.java index f31eb71c4ff3..6a1ba9437a29 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveTableHandle.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveTableHandle.java @@ -13,13 +13,9 @@ */ package io.trino.plugin.hive; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; import io.airlift.json.JsonCodec; import org.testng.annotations.Test; -import java.util.Optional; - import static org.testng.Assert.assertEquals; public class TestHiveTableHandle @@ -29,7 +25,10 @@ public class TestHiveTableHandle @Test public void testRoundTrip() { - HiveTableHandle expected = new HiveTableHandle("schema", "table", ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty()); + HiveTableHandle expected = HiveTableHandle.builder() + .withSchemaName("schema") + .withTableName("table") + .build(); String json = codec.toJson(expected); HiveTableHandle actual = codec.fromJson(json); diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestNodeLocalDynamicSplitPruning.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestNodeLocalDynamicSplitPruning.java index 823a8f029885..7eee8607372a 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestNodeLocalDynamicSplitPruning.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestNodeLocalDynamicSplitPruning.java @@ -141,18 +141,17 @@ private static ConnectorPageSource createTestingPageSource(HiveTransactionHandle TableHandle tableHandle = new TableHandle( TEST_CATALOG_HANDLE, - new HiveTableHandle( - SCHEMA_NAME, - TABLE_NAME, - ImmutableMap.of(), - ImmutableList.of(), - ImmutableList.of(BUCKET_HIVE_COLUMN_HANDLE), - Optional.of(new HiveBucketHandle( + HiveTableHandle.builder() + .withSchemaName(SCHEMA_NAME) + .withTableName(TABLE_NAME) + .withDataColumns(ImmutableList.of(BUCKET_HIVE_COLUMN_HANDLE)) + .withBucketHandle(Optional.of(new HiveBucketHandle( ImmutableList.of(BUCKET_HIVE_COLUMN_HANDLE), BUCKETING_V1, 20, 20, - ImmutableList.of()))), + ImmutableList.of()))) + .build(), transaction); HivePageSourceProvider provider = new HivePageSourceProvider( diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/AbstractFileFormat.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/AbstractFileFormat.java index c61aab051f71..608bc8a1f232 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/AbstractFileFormat.java +++ 
b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/AbstractFileFormat.java @@ -14,7 +14,6 @@ package io.trino.plugin.hive.benchmark; import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.trino.hdfs.HdfsEnvironment; import io.trino.plugin.hive.GenericHiveRecordCursorProvider; @@ -160,8 +159,12 @@ public ConnectorPageSource createGenericReader( return factory.createPageSource( TestingConnectorTransactionHandle.INSTANCE, - session, split, - new HiveTableHandle("schema_name", "table_name", ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty()), + session, + split, + HiveTableHandle.builder() + .withSchemaName("schema_name") + .withTableName("table_name") + .build(), readColumns, DynamicFilter.EMPTY); } diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java index 4d93f627c4e9..8b1627761f98 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java @@ -165,7 +165,10 @@ public void testProjectionPushdown() REGULAR, Optional.empty()); - HiveTableHandle hiveTable = new HiveTableHandle(SCHEMA_NAME, tableName, ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty()); + HiveTableHandle hiveTable = HiveTableHandle.builder() + .withSchemaName(SCHEMA_NAME) + .withTableName(tableName) + .build(); TableHandle table = new TableHandle(TEST_CATALOG_HANDLE, hiveTable, new HiveTransactionHandle(false)); HiveColumnHandle fullColumn = partialColumn.getBaseColumn(); @@ -183,7 +186,9 @@ public void testProjectionPushdown() project( ImmutableMap.of("expr", expression("col")), tableScan( - hiveTable.withProjectedColumns(ImmutableSet.of(fullColumn))::equals, + HiveTableHandle.buildFrom(hiveTable) + .withProjectedColumns(ImmutableSet.of(fullColumn)) + .build()::equals, TupleDomain.all(), ImmutableMap.of("col", fullColumn::equals)))); @@ -195,7 +200,9 @@ public void testProjectionPushdown() p.tableScan( new TableHandle( TEST_CATALOG_HANDLE, - hiveTable.withProjectedColumns(ImmutableSet.of(fullColumn)), + HiveTableHandle.buildFrom(hiveTable) + .withProjectedColumns(ImmutableSet.of(fullColumn)) + .build(), new HiveTransactionHandle(false)), ImmutableList.of(p.symbol("struct_of_int", baseType)), ImmutableMap.of(p.symbol("struct_of_int", baseType), fullColumn)))) @@ -214,7 +221,9 @@ public void testProjectionPushdown() .matches(project( ImmutableMap.of("expr_deref", expression(new SymbolReference("struct_of_int#a"))), tableScan( - hiveTable.withProjectedColumns(ImmutableSet.of(partialColumn))::equals, + HiveTableHandle.buildFrom(hiveTable) + .withProjectedColumns(ImmutableSet.of(partialColumn)) + .build()::equals, TupleDomain.all(), ImmutableMap.of("struct_of_int#a", partialColumn::equals)))); @@ -229,7 +238,10 @@ public void testPredicatePushdown() PushPredicateIntoTableScan pushPredicateIntoTableScan = new PushPredicateIntoTableScan(tester().getPlannerContext(), tester().getTypeAnalyzer()); - HiveTableHandle hiveTable = new HiveTableHandle(SCHEMA_NAME, tableName, ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty()); + HiveTableHandle hiveTable = HiveTableHandle.builder() + .withSchemaName(SCHEMA_NAME) + 
.withTableName(tableName) + .build(); TableHandle table = new TableHandle(TEST_CATALOG_HANDLE, hiveTable, new HiveTransactionHandle(false)); HiveColumnHandle column = createBaseColumn("a", 0, HIVE_INT, INTEGER, REGULAR, Optional.empty()); @@ -261,7 +273,10 @@ public void testColumnPruningProjectionPushdown() PruneTableScanColumns pruneTableScanColumns = new PruneTableScanColumns(tester().getMetadata()); - HiveTableHandle hiveTable = new HiveTableHandle(SCHEMA_NAME, tableName, ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty()); + HiveTableHandle hiveTable = HiveTableHandle.builder() + .withSchemaName(SCHEMA_NAME) + .withTableName(tableName) + .build(); TableHandle table = new TableHandle(TEST_CATALOG_HANDLE, hiveTable, new HiveTransactionHandle(false)); HiveColumnHandle columnA = createBaseColumn("a", 0, HIVE_INT, INTEGER, REGULAR, Optional.empty()); @@ -284,7 +299,9 @@ public void testColumnPruningProjectionPushdown() strictProject( ImmutableMap.of("expr", expression("COLA")), tableScan( - hiveTable.withProjectedColumns(ImmutableSet.of(columnA))::equals, + HiveTableHandle.buildFrom(hiveTable) + .withProjectedColumns(ImmutableSet.of(columnA)) + .build()::equals, TupleDomain.all(), ImmutableMap.of("COLA", columnA::equals)))); @@ -304,7 +321,10 @@ public void testPushdownWithDuplicateExpressions() tester().getTypeAnalyzer(), new ScalarStatsCalculator(tester().getPlannerContext(), tester().getTypeAnalyzer())); - HiveTableHandle hiveTable = new HiveTableHandle(SCHEMA_NAME, tableName, ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty()); + HiveTableHandle hiveTable = HiveTableHandle.builder() + .withSchemaName(SCHEMA_NAME) + .withTableName(tableName) + .build(); TableHandle table = new TableHandle(TEST_CATALOG_HANDLE, hiveTable, new HiveTransactionHandle(false)); HiveColumnHandle bigintColumn = createBaseColumn("just_bigint", 1, toHiveType(BIGINT), BIGINT, REGULAR, Optional.empty()); @@ -341,7 +361,9 @@ public void testPushdownWithDuplicateExpressions() "column_ref", expression("just_bigint_0"), "negated_column_ref", expression("- just_bigint_0")), tableScan( - hiveTable.withProjectedColumns(ImmutableSet.of(bigintColumn))::equals, + HiveTableHandle.buildFrom(hiveTable) + .withProjectedColumns(ImmutableSet.of(bigintColumn)) + .build()::equals, TupleDomain.all(), ImmutableMap.of("just_bigint_0", bigintColumn::equals)))); @@ -365,7 +387,9 @@ public void testPushdownWithDuplicateExpressions() "expr_deref", expression(new SymbolReference("struct_of_bigint#a")), "expr_deref_2", expression(new ArithmeticBinaryExpression(ADD, new SymbolReference("struct_of_bigint#a"), new LongLiteral("2")))), tableScan( - hiveTable.withProjectedColumns(ImmutableSet.of(partialColumn))::equals, + HiveTableHandle.buildFrom(hiveTable) + .withProjectedColumns(ImmutableSet.of(partialColumn)) + .build()::equals, TupleDomain.all(), ImmutableMap.of("struct_of_bigint#a", partialColumn::equals)))); diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java index 96c3d25bde61..0d6fb07f7496 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java @@ -164,8 +164,9 @@ public void testDereferencePushdown() assertPlan( "SELECT col0.x expr_x, 
col0.y expr_y FROM " + testTable, any(tableScan( - ((HiveTableHandle) tableHandle.get().getConnectorHandle()) - .withProjectedColumns(ImmutableSet.of(columnX, columnY))::equals, + HiveTableHandle.buildFrom((HiveTableHandle) tableHandle.get().getConnectorHandle()) + .withProjectedColumns(ImmutableSet.of(columnX, columnY)) + .build()::equals, TupleDomain.all(), ImmutableMap.of("col0#x", columnX::equals, "col0#y", columnY::equals)))); @@ -226,8 +227,9 @@ public void testDereferencePushdown() .right( anyTree( tableScan( - ((HiveTableHandle) tableHandle.get().getConnectorHandle()) - .withProjectedColumns(ImmutableSet.of(column1Handle))::equals, + HiveTableHandle.buildFrom((HiveTableHandle) tableHandle.get().getConnectorHandle()) + .withProjectedColumns(ImmutableSet.of(column1Handle)) + .build()::equals, TupleDomain.all(), ImmutableMap.of("s_expr_1", column1Handle::equals)))))))); }
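
Usage sketch (not part of the patch): constructing a HiveTableHandle from scratch with the new builder, in place of the removed six-argument convenience constructor. The class, builder() entry point and with*() setter names are taken from the diff above; the schema/table values, the empty table-parameters map and the standalone main() wrapper are illustrative assumptions only. Properties that are not set keep the builder defaults (empty column lists, TupleDomain.all(), NO_ACID_TRANSACTION, empty Optionals).

import com.google.common.collect.ImmutableMap;
import io.trino.plugin.hive.HiveTableHandle;

import java.util.Optional;

public class HiveTableHandleBuilderExample
{
    public static void main(String[] args)
    {
        // Replaces a call to the removed six-argument constructor: only the
        // fields needed here are set; every other property keeps the builder
        // defaults declared in HiveTableHandle.Builder. Values are illustrative.
        HiveTableHandle handle = HiveTableHandle.builder()
                .withSchemaName("test_schema")
                .withTableName("test_table")
                .withTableParameters(Optional.of(ImmutableMap.of()))
                .build();

        System.out.println(handle.getSchemaTableName());
    }
}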
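Usage sketch (not part of the patch): deriving a modified copy with buildFrom(), which replaces the removed copy methods such as withProjectedColumns() and withTransaction(). buildFrom() seeds the builder with every field of the existing handle, so only the overridden properties differ in the result. The createBaseColumn() call mirrors the test code above; the column name, type and the wrapper class are illustrative assumptions.

import com.google.common.collect.ImmutableSet;
import io.trino.plugin.hive.HiveColumnHandle;
import io.trino.plugin.hive.HiveTableHandle;

import java.util.Optional;

import static io.trino.plugin.hive.HiveColumnHandle.ColumnType.REGULAR;
import static io.trino.plugin.hive.HiveColumnHandle.createBaseColumn;
import static io.trino.plugin.hive.HiveType.HIVE_INT;
import static io.trino.spi.type.IntegerType.INTEGER;

public class HiveTableHandleBuildFromExample
{
    public static void main(String[] args)
    {
        // A minimal handle, as in TestHiveTableHandle.testRoundTrip().
        HiveTableHandle original = HiveTableHandle.builder()
                .withSchemaName("test_schema")
                .withTableName("test_table")
                .build();

        // Derive a copy that differs only in the projected columns, as the
        // applyProjection() call sites in HiveMetadata now do.
        HiveColumnHandle column = createBaseColumn("a", 0, HIVE_INT, INTEGER, REGULAR, Optional.empty());
        HiveTableHandle projected = HiveTableHandle.buildFrom(original)
                .withProjectedColumns(ImmutableSet.of(column))
                .build();

        System.out.println(projected);
    }
}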