diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java
index b6eeb44d7226..f041f9ba2dce 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java
@@ -482,14 +482,13 @@ public HiveTableHandle getTableHandle(ConnectorSession session, SchemaTableName
         verifyOnline(tableName, Optional.empty(), getProtectMode(table), table.getParameters());
 
-        return HiveTableHandle.builder()
-                .withSchemaName(tableName.getSchemaName())
-                .withTableName(tableName.getTableName())
-                .withTableParameters(Optional.of(table.getParameters()))
-                .withPartitionColumns(getPartitionKeyColumnHandles(table, typeManager))
-                .withDataColumns(getRegularColumnHandles(table, typeManager, getTimestampPrecision(session)))
-                .withBucketHandle(getHiveBucketHandle(session, table, typeManager))
-                .build();
+        return new HiveTableHandle(
+                tableName.getSchemaName(),
+                tableName.getTableName(),
+                table.getParameters(),
+                getPartitionKeyColumnHandles(table, typeManager),
+                getRegularColumnHandles(table, typeManager, getTimestampPrecision(session)),
+                getHiveBucketHandle(session, table, typeManager));
     }
 
     @Override
@@ -515,9 +514,7 @@ public ConnectorAnalyzeMetadata getStatisticsCollectionMetadata(ConnectorSession
             }
         }
 
-        handle = HiveTableHandle.buildFrom(handle)
-                .withAnalyzePartitionValues(Optional.of(list))
-                .build();
+        handle = handle.withAnalyzePartitionValues(list);
         HivePartitionResult partitions = partitionManager.getPartitions(handle, list);
         handle = partitionManager.applyPartitionResult(handle, partitions, alwaysTrue());
     }
@@ -1889,9 +1886,7 @@ public ConnectorTableHandle beginUpdate(ConnectorSession session, ConnectorTable
         HiveUpdateProcessor updateProcessor = new HiveUpdateProcessor(allDataColumns, hiveUpdatedColumns);
         AcidTransaction transaction = metastore.beginUpdate(session, table, updateProcessor);
-        HiveTableHandle updateHandle = HiveTableHandle.buildFrom(hiveTableHandle)
-                .withTransaction(transaction)
-                .build();
+        HiveTableHandle updateHandle = hiveTableHandle.withTransaction(transaction);
 
         WriteInfo writeInfo = locationService.getQueryWriteInfo(locationHandle);
         metastore.declareIntentionToWrite(session, writeInfo.getWriteMode(), writeInfo.getWritePath(), tableName);
@@ -1952,11 +1947,7 @@ public ConnectorMergeTableHandle beginMerge(ConnectorSession session, ConnectorT
         }
 
         HiveInsertTableHandle insertHandle = beginInsertOrMerge(session, tableHandle, retryMode, "Merging into", true);
-        return new HiveMergeTableHandle(
-                HiveTableHandle.buildFrom(hiveTableHandle)
-                        .withTransaction(insertHandle.getTransaction())
-                        .build(),
-                insertHandle);
+        return new HiveMergeTableHandle(hiveTableHandle.withTransaction(insertHandle.getTransaction()), insertHandle);
     }
 
     @Override
@@ -2429,10 +2420,9 @@ private BeginTableExecuteResult(
                 hiveExecuteHandle
                         .withWriteDeclarationId(writeDeclarationId),
-                HiveTableHandle.buildFrom(hiveSourceTableHandle)
+                hiveSourceTableHandle
                         .withMaxScannedFileSize(hiveExecuteHandle.getMaxScannedFileSize())
-                        .withRecordScannedFiles(true)
-                        .build());
+                        .withRecordScannedFiles(true));
     }
 
     @Override
@@ -2780,9 +2770,7 @@ public ConnectorTableHandle beginDelete(ConnectorSession session, ConnectorTable
         WriteInfo writeInfo = locationService.getQueryWriteInfo(locationHandle);
         metastore.declareIntentionToWrite(session, writeInfo.getWriteMode(), writeInfo.getWritePath(), handle.getSchemaTableName());
 
-        return HiveTableHandle.buildFrom(handle)
-                .withTransaction(transaction)
-                .build();
+        return handle.withTransaction(transaction);
     }
 
     @Override
@@ -3054,9 +3042,7 @@ public Optional> applyProjecti
                         ((HiveColumnHandle) assignment.getValue()).getType()))
                 .collect(toImmutableList());
         return Optional.of(new ProjectionApplicationResult<>(
-                HiveTableHandle.buildFrom(hiveTableHandle)
-                        .withProjectedColumns(projectedColumns)
-                        .build(),
+                hiveTableHandle.withProjectedColumns(projectedColumns),
                 projections,
                 assignmentsList,
                 false));
@@ -3103,9 +3089,7 @@ public Optional> applyProjecti
         List outputAssignments = ImmutableList.copyOf(newAssignments.values());
         return Optional.of(new ProjectionApplicationResult<>(
-                HiveTableHandle.buildFrom(hiveTableHandle)
-                        .withProjectedColumns(projectedColumnsBuilder.build())
-                        .build(),
+                hiveTableHandle.withProjectedColumns(projectedColumnsBuilder.build()),
                 newProjections,
                 outputAssignments,
                 false));
@@ -3229,16 +3213,29 @@ public ConnectorTableHandle makeCompatiblePartitioning(ConnectorSession session,
                 largerBucketCount % smallerBucketCount == 0 && Integer.bitCount(largerBucketCount / smallerBucketCount) == 1,
                 "The requested partitioning is not a valid alternative for the table layout");
 
-        return HiveTableHandle.buildFrom(hiveTable)
-                .withBucketHandle(Optional.of(new HiveBucketHandle(
+        return new HiveTableHandle(
+                hiveTable.getSchemaName(),
+                hiveTable.getTableName(),
+                hiveTable.getTableParameters(),
+                hiveTable.getPartitionColumns(),
+                hiveTable.getDataColumns(),
+                hiveTable.getPartitionNames(),
+                hiveTable.getPartitions(),
+                hiveTable.getCompactEffectivePredicate(),
+                hiveTable.getEnforcedConstraint(),
+                Optional.of(new HiveBucketHandle(
                         bucketHandle.getColumns(),
                         bucketHandle.getBucketingVersion(),
                         bucketHandle.getTableBucketCount(),
                         hivePartitioningHandle.getBucketCount(),
-                        bucketHandle.getSortedBy())))
-                .withConstraintColumns(ImmutableSet.of())
-                .withProjectedColumns(ImmutableSet.of()) // Projected columns is used only during optimization phase of planning
-                .build();
+                        bucketHandle.getSortedBy())),
+                hiveTable.getBucketFilter(),
+                hiveTable.getAnalyzePartitionValues(),
+                ImmutableSet.of(),
+                ImmutableSet.of(), // Projected columns is used only during optimization phase of planning
+                hiveTable.getTransaction(),
+                hiveTable.isRecordScannedFiles(),
+                hiveTable.getMaxScannedFileSize());
     }
 
     @VisibleForTesting
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePartitionManager.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePartitionManager.java
index fc4ca58df8c3..1a285a3dd26b 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePartitionManager.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HivePartitionManager.java
@@ -168,16 +168,24 @@ public HiveTableHandle applyPartitionResult(HiveTableHandle handle, HivePartitio
             List partitionColumns = partitions.getPartitionColumns();
             enforcedConstraint = partitions.getEffectivePredicate().filter((column, domain) -> partitionColumns.contains(column));
         }
-        return HiveTableHandle.buildFrom(handle)
-                .withPartitionColumns(ImmutableList.copyOf(partitions.getPartitionColumns()))
-                .withPartitionNames(partitionNames)
-                .withPartitions(partitionList)
-                .withCompactEffectivePredicate(partitions.getCompactEffectivePredicate())
-                .withEnforcedConstraint(enforcedConstraint)
-                .withBucketHandle(partitions.getBucketHandle())
-                .withBucketFilter(partitions.getBucketFilter())
-                .withConstraintColumns(Sets.union(handle.getConstraintColumns(), constraint.getPredicateColumns().orElseGet(ImmutableSet::of)))
-                .build();
+        return new HiveTableHandle(
+                handle.getSchemaName(),
+                handle.getTableName(),
+                handle.getTableParameters(),
+                ImmutableList.copyOf(partitions.getPartitionColumns()),
+                handle.getDataColumns(),
+                partitionNames,
+                partitionList,
+                partitions.getCompactEffectivePredicate(),
+                enforcedConstraint,
+                partitions.getBucketHandle(),
+                partitions.getBucketFilter(),
+                handle.getAnalyzePartitionValues(),
+                Sets.union(handle.getConstraintColumns(), constraint.getPredicateColumns().orElseGet(ImmutableSet::of)),
+                handle.getProjectedColumns(),
+                handle.getTransaction(),
+                handle.isRecordScannedFiles(),
+                handle.getMaxScannedFileSize());
     }
 
     public Iterator<HivePartition> getPartitions(SemiTransactionalHiveMetastore metastore, HiveTableHandle table)
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableHandle.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableHandle.java
index 13f95e9fb97f..416a315bdaed 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableHandle.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableHandle.java
@@ -60,7 +60,7 @@ public class HiveTableHandle
     private final Optional<DataSize> maxScannedFileSize;
 
     @JsonCreator
-    public static HiveTableHandle fromJsonForDeserializationOnly(
+    public HiveTableHandle(
             @JsonProperty("schemaName") String schemaName,
             @JsonProperty("tableName") String tableName,
             @JsonProperty("partitionColumns") List<HiveColumnHandle> partitionColumns,
@@ -72,21 +72,55 @@ public static HiveTableHandle fromJsonForDeserializationOnly(
             @JsonProperty("analyzePartitionValues") Optional<List<List<String>>> analyzePartitionValues,
             @JsonProperty("transaction") AcidTransaction transaction)
     {
-        return builder()
-                .withSchemaName(schemaName)
-                .withTableName(tableName)
-                .withPartitionColumns(partitionColumns)
-                .withDataColumns(dataColumns)
-                .withCompactEffectivePredicate(compactEffectivePredicate)
-                .withEnforcedConstraint(enforcedConstraint)
-                .withBucketHandle(bucketHandle)
-                .withBucketFilter(bucketFilter)
-                .withAnalyzePartitionValues(analyzePartitionValues)
-                .withTransaction(transaction)
-                .build();
-    }
-
-    private HiveTableHandle(
+        this(
+                schemaName,
+                tableName,
+                Optional.empty(),
+                partitionColumns,
+                dataColumns,
+                Optional.empty(),
+                Optional.empty(),
+                compactEffectivePredicate,
+                enforcedConstraint,
+                bucketHandle,
+                bucketFilter,
+                analyzePartitionValues,
+                ImmutableSet.of(),
+                ImmutableSet.of(),
+                transaction,
+                false,
+                Optional.empty());
+    }
+
+    public HiveTableHandle(
+            String schemaName,
+            String tableName,
+            Map<String, String> tableParameters,
+            List<HiveColumnHandle> partitionColumns,
+            List<HiveColumnHandle> dataColumns,
+            Optional<HiveBucketHandle> bucketHandle)
+    {
+        this(
+                schemaName,
+                tableName,
+                Optional.of(tableParameters),
+                partitionColumns,
+                dataColumns,
+                Optional.empty(),
+                Optional.empty(),
+                TupleDomain.all(),
+                TupleDomain.all(),
+                bucketHandle,
+                Optional.empty(),
+                Optional.empty(),
+                ImmutableSet.of(),
+                ImmutableSet.of(),
+                NO_ACID_TRANSACTION,
+                false,
+                Optional.empty());
+    }
+
+    public HiveTableHandle(
             String schemaName,
             String tableName,
             Optional<Map<String, String>> tableParameters,
@@ -125,6 +159,116 @@ private HiveTableHandle(
         this.maxScannedFileSize = requireNonNull(maxSplitFileSize, "maxSplitFileSize is null");
     }
 
+    public HiveTableHandle withAnalyzePartitionValues(List<List<String>> analyzePartitionValues)
+    {
+        return new HiveTableHandle(
+                schemaName,
+                tableName,
+                tableParameters,
+                partitionColumns,
+                dataColumns,
+                partitionNames,
+                partitions,
+                compactEffectivePredicate,
+                enforcedConstraint,
+                bucketHandle,
+
bucketFilter, + Optional.of(analyzePartitionValues), + constraintColumns, + projectedColumns, + transaction, + recordScannedFiles, + maxScannedFileSize); + } + + public HiveTableHandle withTransaction(AcidTransaction transaction) + { + return new HiveTableHandle( + schemaName, + tableName, + tableParameters, + partitionColumns, + dataColumns, + partitionNames, + partitions, + compactEffectivePredicate, + enforcedConstraint, + bucketHandle, + bucketFilter, + analyzePartitionValues, + constraintColumns, + projectedColumns, + transaction, + recordScannedFiles, + maxScannedFileSize); + } + + public HiveTableHandle withProjectedColumns(Set projectedColumns) + { + return new HiveTableHandle( + schemaName, + tableName, + tableParameters, + partitionColumns, + dataColumns, + partitionNames, + partitions, + compactEffectivePredicate, + enforcedConstraint, + bucketHandle, + bucketFilter, + analyzePartitionValues, + constraintColumns, + projectedColumns, + transaction, + recordScannedFiles, + maxScannedFileSize); + } + + public HiveTableHandle withRecordScannedFiles(boolean recordScannedFiles) + { + return new HiveTableHandle( + schemaName, + tableName, + tableParameters, + partitionColumns, + dataColumns, + partitionNames, + partitions, + compactEffectivePredicate, + enforcedConstraint, + bucketHandle, + bucketFilter, + analyzePartitionValues, + constraintColumns, + projectedColumns, + transaction, + recordScannedFiles, + maxScannedFileSize); + } + + public HiveTableHandle withMaxScannedFileSize(Optional maxScannedFileSize) + { + return new HiveTableHandle( + schemaName, + tableName, + tableParameters, + partitionColumns, + dataColumns, + partitionNames, + partitions, + compactEffectivePredicate, + enforcedConstraint, + bucketHandle, + bucketFilter, + analyzePartitionValues, + constraintColumns, + projectedColumns, + transaction, + recordScannedFiles, + maxScannedFileSize); + } + @JsonProperty public String getSchemaName() { @@ -343,184 +487,4 @@ public String toString() }); return builder.toString(); } - - public static Builder builder() - { - return new Builder(); - } - - public static Builder buildFrom(HiveTableHandle table) - { - return new Builder(table); - } - - public static class Builder - { - private String schemaName; - private String tableName; - private Optional> tableParameters = Optional.empty(); - private List partitionColumns = ImmutableList.of(); - private List dataColumns = ImmutableList.of(); - private Optional> partitionNames = Optional.empty(); - private Optional> partitions = Optional.empty(); - private TupleDomain compactEffectivePredicate = TupleDomain.all(); - private TupleDomain enforcedConstraint = TupleDomain.all(); - private Optional bucketHandle = Optional.empty(); - private Optional bucketFilter = Optional.empty(); - private Optional>> analyzePartitionValues = Optional.empty(); - private Set constraintColumns = ImmutableSet.of(); - private Set projectedColumns = ImmutableSet.of(); - private AcidTransaction transaction = NO_ACID_TRANSACTION; - private boolean recordScannedFiles; - private Optional maxScannedFileSize = Optional.empty(); - - private Builder() - { - } - - private Builder(HiveTableHandle table) - { - this.schemaName = table.schemaName; - this.tableName = table.tableName; - this.tableParameters = table.tableParameters; - this.partitionColumns = table.partitionColumns; - this.dataColumns = table.dataColumns; - this.partitionNames = table.partitionNames; - this.partitions = table.partitions; - this.compactEffectivePredicate = table.compactEffectivePredicate; - 
this.enforcedConstraint = table.enforcedConstraint; - this.bucketHandle = table.bucketHandle; - this.bucketFilter = table.bucketFilter; - this.analyzePartitionValues = table.analyzePartitionValues; - this.constraintColumns = table.constraintColumns; - this.projectedColumns = table.projectedColumns; - this.transaction = table.transaction; - this.recordScannedFiles = table.recordScannedFiles; - this.maxScannedFileSize = table.maxScannedFileSize; - } - - public Builder withSchemaName(String schemaName) - { - this.schemaName = schemaName; - return this; - } - - public Builder withTableName(String tableName) - { - this.tableName = tableName; - return this; - } - - public Builder withTableParameters(Optional> tableParameters) - { - this.tableParameters = tableParameters; - return this; - } - - public Builder withPartitionColumns(List partitionColumns) - { - this.partitionColumns = partitionColumns; - return this; - } - - public Builder withDataColumns(List dataColumns) - { - this.dataColumns = dataColumns; - return this; - } - - public Builder withPartitionNames(Optional> partitionNames) - { - this.partitionNames = partitionNames; - return this; - } - - public Builder withPartitions(Optional> partitions) - { - this.partitions = partitions; - return this; - } - - public Builder withCompactEffectivePredicate(TupleDomain compactEffectivePredicate) - { - this.compactEffectivePredicate = compactEffectivePredicate; - return this; - } - - public Builder withEnforcedConstraint(TupleDomain enforcedConstraint) - { - this.enforcedConstraint = enforcedConstraint; - return this; - } - - public Builder withBucketHandle(Optional bucketHandle) - { - this.bucketHandle = bucketHandle; - return this; - } - - public Builder withBucketFilter(Optional bucketFilter) - { - this.bucketFilter = bucketFilter; - return this; - } - - public Builder withAnalyzePartitionValues(Optional>> analyzePartitionValues) - { - this.analyzePartitionValues = analyzePartitionValues; - return this; - } - - public Builder withConstraintColumns(Set constraintColumns) - { - this.constraintColumns = constraintColumns; - return this; - } - - public Builder withProjectedColumns(Set projectedColumns) - { - this.projectedColumns = projectedColumns; - return this; - } - - public Builder withTransaction(AcidTransaction transaction) - { - this.transaction = transaction; - return this; - } - - public Builder withRecordScannedFiles(boolean recordScannedFiles) - { - this.recordScannedFiles = recordScannedFiles; - return this; - } - - public Builder withMaxScannedFileSize(Optional maxScannedFileSize) - { - this.maxScannedFileSize = maxScannedFileSize; - return this; - } - - public HiveTableHandle build() - { - return new HiveTableHandle( - schemaName, - tableName, - tableParameters, - partitionColumns, - dataColumns, - partitionNames, - partitions, - compactEffectivePredicate, - enforcedConstraint, - bucketHandle, - bucketFilter, - analyzePartitionValues, - constraintColumns, - projectedColumns, - transaction, - recordScannedFiles, - maxScannedFileSize); - } - } } diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/PartitionsSystemTableProvider.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/PartitionsSystemTableProvider.java index 548efc10153d..24c8a45d06ec 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/PartitionsSystemTableProvider.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/PartitionsSystemTableProvider.java @@ -86,15 +86,13 @@ public Optional getSystemTable(HiveMetadata metadata, 
ConnectorSess return Optional.empty(); } verifyOnline(sourceTableName, Optional.empty(), getProtectMode(sourceTable), sourceTable.getParameters()); - - HiveTableHandle sourceTableHandle = HiveTableHandle.builder() - .withSchemaName(sourceTableName.getSchemaName()) - .withTableName(sourceTableName.getTableName()) - .withTableParameters(Optional.of(sourceTable.getParameters())) - .withPartitionColumns(getPartitionKeyColumnHandles(sourceTable, typeManager)) - .withDataColumns(getRegularColumnHandles(sourceTable, typeManager, getTimestampPrecision(session))) - .withBucketHandle(getHiveBucketHandle(session, sourceTable, typeManager)) - .build(); + HiveTableHandle sourceTableHandle = new HiveTableHandle( + sourceTableName.getSchemaName(), + sourceTableName.getTableName(), + sourceTable.getParameters(), + getPartitionKeyColumnHandles(sourceTable, typeManager), + getRegularColumnHandles(sourceTable, typeManager, getTimestampPrecision(session)), + getHiveBucketHandle(session, sourceTable, typeManager)); List partitionColumns = sourceTableHandle.getPartitionColumns(); if (partitionColumns.isEmpty()) { diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java index 3b1f887e2c53..e7339589f2a5 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/AbstractTestHive.java @@ -710,10 +710,7 @@ protected void setupHive(String databaseName) tablePartitionSchemaChangeNonCanonical = new SchemaTableName(database, "trino_test_partition_schema_change_non_canonical"); tableBucketEvolution = new SchemaTableName(database, "trino_test_bucket_evolution"); - invalidTableHandle = HiveTableHandle.builder() - .withSchemaName(database) - .withTableName(INVALID_TABLE) - .build(); + invalidTableHandle = new HiveTableHandle(database, INVALID_TABLE, ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty()); dsColumn = createBaseColumn("ds", -1, HIVE_STRING, VARCHAR, PARTITION_KEY, Optional.empty()); fileFormatColumn = createBaseColumn("file_format", -1, HIVE_STRING, VARCHAR, PARTITION_KEY, Optional.empty()); @@ -3719,9 +3716,7 @@ public void testApplyProjection() // Extra columns handles in HiveTableHandle should get pruned projectionResult = metadata.applyProjection( session, - HiveTableHandle.buildFrom((HiveTableHandle) tableHandle) - .withProjectedColumns(ImmutableSet.copyOf(columnHandles)) - .build(), + ((HiveTableHandle) tableHandle).withProjectedColumns(ImmutableSet.copyOf(columnHandles)), inputProjections, inputAssignments); assertProjectionResult(projectionResult, false, inputProjections, expectedAssignments); diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHivePageSink.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHivePageSink.java index d5e2e3a13b0b..e420894d7db8 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHivePageSink.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHivePageSink.java @@ -265,10 +265,7 @@ private static ConnectorPageSource createPageSource(HiveTransactionHandle transa Optional.empty(), 0, SplitWeight.standard()); - ConnectorTableHandle table = HiveTableHandle.builder() - .withSchemaName(SCHEMA_NAME) - .withTableName(TABLE_NAME) - .build(); + ConnectorTableHandle table = new HiveTableHandle(SCHEMA_NAME, TABLE_NAME, ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty()); 
HivePageSourceProvider provider = new HivePageSourceProvider( TESTING_TYPE_MANAGER, HDFS_ENVIRONMENT, diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveTableHandle.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveTableHandle.java index 6a1ba9437a29..f31eb71c4ff3 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveTableHandle.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveTableHandle.java @@ -13,9 +13,13 @@ */ package io.trino.plugin.hive; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import io.airlift.json.JsonCodec; import org.testng.annotations.Test; +import java.util.Optional; + import static org.testng.Assert.assertEquals; public class TestHiveTableHandle @@ -25,10 +29,7 @@ public class TestHiveTableHandle @Test public void testRoundTrip() { - HiveTableHandle expected = HiveTableHandle.builder() - .withSchemaName("schema") - .withTableName("table") - .build(); + HiveTableHandle expected = new HiveTableHandle("schema", "table", ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty()); String json = codec.toJson(expected); HiveTableHandle actual = codec.fromJson(json); diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestNodeLocalDynamicSplitPruning.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestNodeLocalDynamicSplitPruning.java index 7eee8607372a..823a8f029885 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestNodeLocalDynamicSplitPruning.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestNodeLocalDynamicSplitPruning.java @@ -141,17 +141,18 @@ private static ConnectorPageSource createTestingPageSource(HiveTransactionHandle TableHandle tableHandle = new TableHandle( TEST_CATALOG_HANDLE, - HiveTableHandle.builder() - .withSchemaName(SCHEMA_NAME) - .withTableName(TABLE_NAME) - .withDataColumns(ImmutableList.of(BUCKET_HIVE_COLUMN_HANDLE)) - .withBucketHandle(Optional.of(new HiveBucketHandle( + new HiveTableHandle( + SCHEMA_NAME, + TABLE_NAME, + ImmutableMap.of(), + ImmutableList.of(), + ImmutableList.of(BUCKET_HIVE_COLUMN_HANDLE), + Optional.of(new HiveBucketHandle( ImmutableList.of(BUCKET_HIVE_COLUMN_HANDLE), BUCKETING_V1, 20, 20, - ImmutableList.of()))) - .build(), + ImmutableList.of()))), transaction); HivePageSourceProvider provider = new HivePageSourceProvider( diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/AbstractFileFormat.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/AbstractFileFormat.java index 608bc8a1f232..c61aab051f71 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/AbstractFileFormat.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/benchmark/AbstractFileFormat.java @@ -14,6 +14,7 @@ package io.trino.plugin.hive.benchmark; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.trino.hdfs.HdfsEnvironment; import io.trino.plugin.hive.GenericHiveRecordCursorProvider; @@ -159,12 +160,8 @@ public ConnectorPageSource createGenericReader( return factory.createPageSource( TestingConnectorTransactionHandle.INSTANCE, - session, - split, - HiveTableHandle.builder() - .withSchemaName("schema_name") - .withTableName("table_name") - .build(), + session, split, + new HiveTableHandle("schema_name", "table_name", ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), 
Optional.empty()), readColumns, DynamicFilter.EMPTY); } diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java index 8b1627761f98..4d93f627c4e9 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestConnectorPushdownRulesWithHive.java @@ -165,10 +165,7 @@ public void testProjectionPushdown() REGULAR, Optional.empty()); - HiveTableHandle hiveTable = HiveTableHandle.builder() - .withSchemaName(SCHEMA_NAME) - .withTableName(tableName) - .build(); + HiveTableHandle hiveTable = new HiveTableHandle(SCHEMA_NAME, tableName, ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty()); TableHandle table = new TableHandle(TEST_CATALOG_HANDLE, hiveTable, new HiveTransactionHandle(false)); HiveColumnHandle fullColumn = partialColumn.getBaseColumn(); @@ -186,9 +183,7 @@ public void testProjectionPushdown() project( ImmutableMap.of("expr", expression("col")), tableScan( - HiveTableHandle.buildFrom(hiveTable) - .withProjectedColumns(ImmutableSet.of(fullColumn)) - .build()::equals, + hiveTable.withProjectedColumns(ImmutableSet.of(fullColumn))::equals, TupleDomain.all(), ImmutableMap.of("col", fullColumn::equals)))); @@ -200,9 +195,7 @@ public void testProjectionPushdown() p.tableScan( new TableHandle( TEST_CATALOG_HANDLE, - HiveTableHandle.buildFrom(hiveTable) - .withProjectedColumns(ImmutableSet.of(fullColumn)) - .build(), + hiveTable.withProjectedColumns(ImmutableSet.of(fullColumn)), new HiveTransactionHandle(false)), ImmutableList.of(p.symbol("struct_of_int", baseType)), ImmutableMap.of(p.symbol("struct_of_int", baseType), fullColumn)))) @@ -221,9 +214,7 @@ public void testProjectionPushdown() .matches(project( ImmutableMap.of("expr_deref", expression(new SymbolReference("struct_of_int#a"))), tableScan( - HiveTableHandle.buildFrom(hiveTable) - .withProjectedColumns(ImmutableSet.of(partialColumn)) - .build()::equals, + hiveTable.withProjectedColumns(ImmutableSet.of(partialColumn))::equals, TupleDomain.all(), ImmutableMap.of("struct_of_int#a", partialColumn::equals)))); @@ -238,10 +229,7 @@ public void testPredicatePushdown() PushPredicateIntoTableScan pushPredicateIntoTableScan = new PushPredicateIntoTableScan(tester().getPlannerContext(), tester().getTypeAnalyzer()); - HiveTableHandle hiveTable = HiveTableHandle.builder() - .withSchemaName(SCHEMA_NAME) - .withTableName(tableName) - .build(); + HiveTableHandle hiveTable = new HiveTableHandle(SCHEMA_NAME, tableName, ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty()); TableHandle table = new TableHandle(TEST_CATALOG_HANDLE, hiveTable, new HiveTransactionHandle(false)); HiveColumnHandle column = createBaseColumn("a", 0, HIVE_INT, INTEGER, REGULAR, Optional.empty()); @@ -273,10 +261,7 @@ public void testColumnPruningProjectionPushdown() PruneTableScanColumns pruneTableScanColumns = new PruneTableScanColumns(tester().getMetadata()); - HiveTableHandle hiveTable = HiveTableHandle.builder() - .withSchemaName(SCHEMA_NAME) - .withTableName(tableName) - .build(); + HiveTableHandle hiveTable = new HiveTableHandle(SCHEMA_NAME, tableName, ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty()); TableHandle table = new TableHandle(TEST_CATALOG_HANDLE, hiveTable, new HiveTransactionHandle(false)); 
HiveColumnHandle columnA = createBaseColumn("a", 0, HIVE_INT, INTEGER, REGULAR, Optional.empty()); @@ -299,9 +284,7 @@ public void testColumnPruningProjectionPushdown() strictProject( ImmutableMap.of("expr", expression("COLA")), tableScan( - HiveTableHandle.buildFrom(hiveTable) - .withProjectedColumns(ImmutableSet.of(columnA)) - .build()::equals, + hiveTable.withProjectedColumns(ImmutableSet.of(columnA))::equals, TupleDomain.all(), ImmutableMap.of("COLA", columnA::equals)))); @@ -321,10 +304,7 @@ public void testPushdownWithDuplicateExpressions() tester().getTypeAnalyzer(), new ScalarStatsCalculator(tester().getPlannerContext(), tester().getTypeAnalyzer())); - HiveTableHandle hiveTable = HiveTableHandle.builder() - .withSchemaName(SCHEMA_NAME) - .withTableName(tableName) - .build(); + HiveTableHandle hiveTable = new HiveTableHandle(SCHEMA_NAME, tableName, ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), Optional.empty()); TableHandle table = new TableHandle(TEST_CATALOG_HANDLE, hiveTable, new HiveTransactionHandle(false)); HiveColumnHandle bigintColumn = createBaseColumn("just_bigint", 1, toHiveType(BIGINT), BIGINT, REGULAR, Optional.empty()); @@ -361,9 +341,7 @@ public void testPushdownWithDuplicateExpressions() "column_ref", expression("just_bigint_0"), "negated_column_ref", expression("- just_bigint_0")), tableScan( - HiveTableHandle.buildFrom(hiveTable) - .withProjectedColumns(ImmutableSet.of(bigintColumn)) - .build()::equals, + hiveTable.withProjectedColumns(ImmutableSet.of(bigintColumn))::equals, TupleDomain.all(), ImmutableMap.of("just_bigint_0", bigintColumn::equals)))); @@ -387,9 +365,7 @@ public void testPushdownWithDuplicateExpressions() "expr_deref", expression(new SymbolReference("struct_of_bigint#a")), "expr_deref_2", expression(new ArithmeticBinaryExpression(ADD, new SymbolReference("struct_of_bigint#a"), new LongLiteral("2")))), tableScan( - HiveTableHandle.buildFrom(hiveTable) - .withProjectedColumns(ImmutableSet.of(partialColumn)) - .build()::equals, + hiveTable.withProjectedColumns(ImmutableSet.of(partialColumn))::equals, TupleDomain.all(), ImmutableMap.of("struct_of_bigint#a", partialColumn::equals)))); diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java index 0d6fb07f7496..96c3d25bde61 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/optimizer/TestHiveProjectionPushdownIntoTableScan.java @@ -164,9 +164,8 @@ public void testDereferencePushdown() assertPlan( "SELECT col0.x expr_x, col0.y expr_y FROM " + testTable, any(tableScan( - HiveTableHandle.buildFrom((HiveTableHandle) tableHandle.get().getConnectorHandle()) - .withProjectedColumns(ImmutableSet.of(columnX, columnY)) - .build()::equals, + ((HiveTableHandle) tableHandle.get().getConnectorHandle()) + .withProjectedColumns(ImmutableSet.of(columnX, columnY))::equals, TupleDomain.all(), ImmutableMap.of("col0#x", columnX::equals, "col0#y", columnY::equals)))); @@ -227,9 +226,8 @@ public void testDereferencePushdown() .right( anyTree( tableScan( - HiveTableHandle.buildFrom((HiveTableHandle) tableHandle.get().getConnectorHandle()) - .withProjectedColumns(ImmutableSet.of(column1Handle)) - .build()::equals, + ((HiveTableHandle) tableHandle.get().getConnectorHandle()) + 
.withProjectedColumns(ImmutableSet.of(column1Handle))::equals,
                                 TupleDomain.all(),
                                 ImmutableMap.of("s_expr_1", column1Handle::equals))))))));
     }
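
Note (editor's sketch, not part of the patch): the builder removed by this change is replaced by the public constructors and the copy-on-write "with" methods added in HiveTableHandle.java. The snippet below only restates call patterns that appear in the diff; identifiers such as table, tableName, typeManager, session, transaction and someColumn are placeholders for whatever is in scope at a given call site.

    // Full construction now goes through the six-argument convenience constructor
    // instead of HiveTableHandle.builder()...build().
    HiveTableHandle handle = new HiveTableHandle(
            tableName.getSchemaName(),
            tableName.getTableName(),
            table.getParameters(),
            getPartitionKeyColumnHandles(table, typeManager),
            getRegularColumnHandles(table, typeManager, getTimestampPrecision(session)),
            getHiveBucketHandle(session, table, typeManager));

    // Derived handles no longer need HiveTableHandle.buildFrom(handle)...build();
    // each "with" method returns a modified copy and leaves the original unchanged.
    HiveTableHandle updated = handle
            .withTransaction(transaction)
            .withProjectedColumns(ImmutableSet.of(someColumn));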