From 13d3061c95d5dfe077618704eccb2807f3ad2652 Mon Sep 17 00:00:00 2001 From: Mayank Vadariya <48036907+mayankvadariya@users.noreply.github.com> Date: Thu, 12 Dec 2024 09:50:50 -0500 Subject: [PATCH 1/8] Update docker image version to 107 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 907c0c868ac..e7da68940ea 100644 --- a/pom.xml +++ b/pom.xml @@ -191,7 +191,7 @@ 1.12.779 4.17.0 7.7.1 - 106 + 107 1.22 11.0.1 1.15.1 From 07aeee27539c5d3c72b42495c0a05c6d0ffdbc94 Mon Sep 17 00:00:00 2001 From: Mayank Vadariya Date: Fri, 13 Dec 2024 17:20:22 -0500 Subject: [PATCH 2/8] Rename HiveMinioDataLake to Hive3MinioDataLake --- .../plugin/deltalake/BaseDeltaFailureRecoveryTest.java | 4 ++-- .../deltalake/BaseDeltaLakeAwsConnectorSmokeTest.java | 6 +++--- .../plugin/deltalake/BaseDeltaLakeCompatibility.java | 6 +++--- .../io/trino/plugin/deltalake/DeltaLakeQueryRunner.java | 4 ++-- .../java/io/trino/plugin/deltalake/SparkDeltaLake.java | 6 +++--- .../deltalake/TestDeltaLakeCreateTableStatistics.java | 4 ++-- .../io/trino/plugin/deltalake/TestDeltaLakeDelete.java | 6 +++--- .../plugin/deltalake/TestDeltaLakeDynamicFiltering.java | 6 +++--- .../TestDeltaLakeFlushMetadataCacheProcedure.java | 4 ++-- .../TestDeltaLakeSharedHiveMetastoreWithViews.java | 6 +++--- .../io/trino/plugin/deltalake/TestDeltaLakeUpdate.java | 4 ++-- .../io/trino/plugin/deltalake/TestPredicatePushdown.java | 6 +++--- .../java/io/trino/plugin/hive/TestHive3OnDataLake.java | 6 +++--- .../plugin/hive/TestHiveAnalyzeCorruptStatistics.java | 6 +++--- .../hive/TestHiveCustomCatalogConnectorSmokeTest.java | 4 ++-- .../plugin/hive/TestHiveQueryFailureRecoveryTest.java | 6 +++--- .../plugin/hive/TestHiveTaskFailureRecoveryTest.java | 6 +++--- .../{HiveMinioDataLake.java => Hive3MinioDataLake.java} | 8 ++++---- .../hive/metastore/thrift/TestHiveMetastoreCatalogs.java | 6 +++--- .../java/io/trino/plugin/hive/s3/S3HiveQueryRunner.java | 8 ++++---- .../io/trino/plugin/hive/s3/TestHiveS3MinioQueries.java | 6 +++--- .../test/java/io/trino/plugin/hudi/HudiQueryRunner.java | 6 +++--- .../plugin/hudi/TestHudiMinioConnectorSmokeTest.java | 4 ++-- .../iceberg/BaseIcebergMinioConnectorSmokeTest.java | 6 +++--- .../java/io/trino/plugin/iceberg/IcebergQueryRunner.java | 4 ++-- .../hms/TestTrinoHiveCatalogWithHiveMetastore.java | 6 +++--- .../delta/TestDeltaFaultTolerantExecutionTest.java | 4 ++-- 27 files changed, 74 insertions(+), 74 deletions(-) rename plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/{HiveMinioDataLake.java => Hive3MinioDataLake.java} (94%) diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaFailureRecoveryTest.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaFailureRecoveryTest.java index d189e96fa4e..88f7ee2411a 100644 --- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaFailureRecoveryTest.java +++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaFailureRecoveryTest.java @@ -17,7 +17,7 @@ import io.trino.operator.RetryPolicy; import io.trino.plugin.exchange.filesystem.FileSystemExchangePlugin; import io.trino.plugin.exchange.filesystem.containers.MinioStorage; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.spi.ErrorType; import io.trino.testing.BaseFailureRecoveryTest; import io.trino.testing.QueryRunner; @@ -59,7 +59,7 @@ protected QueryRunner createQueryRunner( 
Module failureInjectionModule) throws Exception { - HiveMinioDataLake hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName)); + Hive3MinioDataLake hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake(bucketName)); hiveMinioDataLake.start(); MinioStorage minioStorage = closeAfterClass(new MinioStorage("test-exchange-spooling-" + randomNameSuffix())); minioStorage.start(); diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeAwsConnectorSmokeTest.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeAwsConnectorSmokeTest.java index e7b1529a8f1..8fd2de505c5 100644 --- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeAwsConnectorSmokeTest.java +++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeAwsConnectorSmokeTest.java @@ -13,8 +13,8 @@ */ package io.trino.plugin.deltalake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.plugin.hive.containers.HiveHadoop; -import io.trino.plugin.hive.containers.HiveMinioDataLake; import io.trino.testing.QueryRunner; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.TestInstance; @@ -29,12 +29,12 @@ public abstract class BaseDeltaLakeAwsConnectorSmokeTest extends BaseDeltaLakeConnectorSmokeTest { - protected HiveMinioDataLake hiveMinioDataLake; + protected Hive3MinioDataLake hiveMinioDataLake; @Override protected HiveHadoop createHiveHadoop() { - hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName)); + hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake(bucketName)); hiveMinioDataLake.start(); return hiveMinioDataLake.getHiveHadoop(); // closed by superclass } diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeCompatibility.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeCompatibility.java index 9c9cde68f35..4c352a590b5 100644 --- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeCompatibility.java +++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/BaseDeltaLakeCompatibility.java @@ -13,7 +13,7 @@ */ package io.trino.plugin.deltalake; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.testing.AbstractTestQueryFramework; import io.trino.testing.QueryRunner; import io.trino.tpch.TpchTable; @@ -32,7 +32,7 @@ public abstract class BaseDeltaLakeCompatibility { protected final String bucketName; protected final String resourcePath; - protected HiveMinioDataLake hiveMinioDataLake; + protected Hive3MinioDataLake hiveMinioDataLake; public BaseDeltaLakeCompatibility(String resourcePath) { @@ -44,7 +44,7 @@ public BaseDeltaLakeCompatibility(String resourcePath) protected QueryRunner createQueryRunner() throws Exception { - hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName)); + hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake(bucketName)); hiveMinioDataLake.start(); QueryRunner queryRunner = DeltaLakeQueryRunner.builder() diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/DeltaLakeQueryRunner.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/DeltaLakeQueryRunner.java index 01df1213f3d..5c2ecb1d36f 100644 --- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/DeltaLakeQueryRunner.java +++ 
b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/DeltaLakeQueryRunner.java @@ -19,8 +19,8 @@ import io.airlift.log.Level; import io.airlift.log.Logger; import io.airlift.log.Logging; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.plugin.hive.containers.HiveHadoop; -import io.trino.plugin.hive.containers.HiveMinioDataLake; import io.trino.plugin.tpch.TpchPlugin; import io.trino.testing.DistributedQueryRunner; import io.trino.testing.QueryRunner; @@ -259,7 +259,7 @@ public static void main(String[] args) { String bucketName = "test-bucket"; - HiveMinioDataLake hiveMinioDataLake = new HiveMinioDataLake(bucketName); + Hive3MinioDataLake hiveMinioDataLake = new Hive3MinioDataLake(bucketName); hiveMinioDataLake.start(); QueryRunner queryRunner = builder() diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/SparkDeltaLake.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/SparkDeltaLake.java index 6ac5e396d83..9742c853c5e 100644 --- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/SparkDeltaLake.java +++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/SparkDeltaLake.java @@ -14,8 +14,8 @@ package io.trino.plugin.deltalake; import io.trino.plugin.base.util.AutoCloseableCloser; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.plugin.hive.containers.HiveHadoop; -import io.trino.plugin.hive.containers.HiveMinioDataLake; import io.trino.testing.containers.Minio; import org.testcontainers.containers.GenericContainer; @@ -26,11 +26,11 @@ public final class SparkDeltaLake implements AutoCloseable { private final AutoCloseableCloser closer = AutoCloseableCloser.create(); - private final HiveMinioDataLake hiveMinio; + private final Hive3MinioDataLake hiveMinio; public SparkDeltaLake(String bucketName) { - hiveMinio = closer.register(new HiveMinioDataLake(bucketName)); + hiveMinio = closer.register(new Hive3MinioDataLake(bucketName)); hiveMinio.start(); closer.register(new GenericContainer<>("ghcr.io/trinodb/testing/spark3-delta:" + getDockerImagesVersion())) diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeCreateTableStatistics.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeCreateTableStatistics.java index 1263d32ce54..1bace334c40 100644 --- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeCreateTableStatistics.java +++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeCreateTableStatistics.java @@ -18,7 +18,7 @@ import io.trino.plugin.deltalake.transactionlog.AddFileEntry; import io.trino.plugin.deltalake.transactionlog.TransactionLogAccess; import io.trino.plugin.deltalake.transactionlog.statistics.DeltaLakeFileStatistics; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.spi.type.DateType; import io.trino.spi.type.DecimalType; import io.trino.spi.type.DoubleType; @@ -68,7 +68,7 @@ protected QueryRunner createQueryRunner() throws Exception { this.bucketName = "delta-test-create-table-statistics-" + randomNameSuffix(); - HiveMinioDataLake hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName)); + Hive3MinioDataLake hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake(bucketName)); hiveMinioDataLake.start(); return DeltaLakeQueryRunner.builder() diff --git 
a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeDelete.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeDelete.java index d616b3ec204..f854110453e 100644 --- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeDelete.java +++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeDelete.java @@ -14,7 +14,7 @@ package io.trino.plugin.deltalake; import com.google.common.collect.ImmutableSet; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.testing.AbstractTestQueryFramework; import io.trino.testing.QueryRunner; import org.junit.jupiter.api.Test; @@ -31,13 +31,13 @@ public class TestDeltaLakeDelete extends AbstractTestQueryFramework { private final String bucketName = "test-delta-lake-connector-test-" + randomNameSuffix(); - private HiveMinioDataLake hiveMinioDataLake; + private Hive3MinioDataLake hiveMinioDataLake; @Override protected QueryRunner createQueryRunner() throws Exception { - hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName)); + hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake(bucketName)); hiveMinioDataLake.start(); return DeltaLakeQueryRunner.builder() diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeDynamicFiltering.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeDynamicFiltering.java index ab566111784..4bfe9a7cd8f 100644 --- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeDynamicFiltering.java +++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeDynamicFiltering.java @@ -22,7 +22,7 @@ import io.trino.metadata.QualifiedObjectName; import io.trino.metadata.Split; import io.trino.metadata.TableHandle; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.security.AllowAllAccessControl; import io.trino.spi.QueryId; import io.trino.spi.connector.ColumnHandle; @@ -60,14 +60,14 @@ public class TestDeltaLakeDynamicFiltering extends AbstractTestQueryFramework { private final String bucketName = "delta-lake-test-dynamic-filtering-" + randomNameSuffix(); - private HiveMinioDataLake hiveMinioDataLake; + private Hive3MinioDataLake hiveMinioDataLake; @Override protected QueryRunner createQueryRunner() throws Exception { verify(new DynamicFilterConfig().isEnableDynamicFiltering(), "this class assumes dynamic filtering is enabled by default"); - hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName)); + hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake(bucketName)); hiveMinioDataLake.start(); QueryRunner queryRunner = DeltaLakeQueryRunner.builder() diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeFlushMetadataCacheProcedure.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeFlushMetadataCacheProcedure.java index 61d73d09880..68531e7e93c 100644 --- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeFlushMetadataCacheProcedure.java +++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeFlushMetadataCacheProcedure.java @@ -14,7 +14,7 @@ package io.trino.plugin.deltalake; import io.trino.metastore.HiveMetastore; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import 
io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.plugin.hive.metastore.thrift.BridgingHiveMetastore; import io.trino.testing.AbstractTestQueryFramework; import io.trino.testing.QueryRunner; @@ -39,7 +39,7 @@ public class TestDeltaLakeFlushMetadataCacheProcedure protected QueryRunner createQueryRunner() throws Exception { - HiveMinioDataLake hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName, HIVE3_IMAGE)); + Hive3MinioDataLake hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake(bucketName, HIVE3_IMAGE)); hiveMinioDataLake.start(); metastore = new BridgingHiveMetastore( testingThriftHiveMetastoreBuilder() diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeSharedHiveMetastoreWithViews.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeSharedHiveMetastoreWithViews.java index bef79413f6f..31b8cd778d8 100644 --- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeSharedHiveMetastoreWithViews.java +++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeSharedHiveMetastoreWithViews.java @@ -15,7 +15,7 @@ import com.google.common.collect.ImmutableMap; import io.trino.plugin.hive.TestingHivePlugin; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.testing.AbstractTestQueryFramework; import io.trino.testing.QueryRunner; import org.junit.jupiter.api.AfterAll; @@ -35,14 +35,14 @@ public class TestDeltaLakeSharedHiveMetastoreWithViews extends AbstractTestQueryFramework { private final String bucketName = "delta-lake-shared-hive-with-views-" + randomNameSuffix(); - private HiveMinioDataLake hiveMinioDataLake; + private Hive3MinioDataLake hiveMinioDataLake; private String schema; @Override protected QueryRunner createQueryRunner() throws Exception { - this.hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName)); + this.hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake(bucketName)); this.hiveMinioDataLake.start(); QueryRunner queryRunner = DeltaLakeQueryRunner.builder() diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeUpdate.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeUpdate.java index 857f15b0c2c..6c1cab6c098 100644 --- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeUpdate.java +++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeUpdate.java @@ -13,7 +13,7 @@ */ package io.trino.plugin.deltalake; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.testing.AbstractTestQueryFramework; import io.trino.testing.QueryRunner; import org.junit.jupiter.api.Test; @@ -35,7 +35,7 @@ public TestDeltaLakeUpdate() protected QueryRunner createQueryRunner() throws Exception { - HiveMinioDataLake hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName)); + Hive3MinioDataLake hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake(bucketName)); hiveMinioDataLake.start(); return DeltaLakeQueryRunner.builder() diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestPredicatePushdown.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestPredicatePushdown.java index 0d6bf361043..e6b909b93aa 100644 --- 
a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestPredicatePushdown.java +++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestPredicatePushdown.java @@ -16,7 +16,7 @@ import com.google.common.collect.ContiguousSet; import io.trino.Session; import io.trino.operator.OperatorStats; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.spi.QueryId; import io.trino.testing.AbstractTestQueryFramework; import io.trino.testing.MaterializedResult; @@ -48,13 +48,13 @@ public class TestPredicatePushdown */ private final TableResource testTable = new TableResource("custkey_15rowgroups"); - private HiveMinioDataLake hiveMinioDataLake; + private Hive3MinioDataLake hiveMinioDataLake; @Override protected QueryRunner createQueryRunner() throws Exception { - hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName)); + hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake(bucketName)); hiveMinioDataLake.start(); return DeltaLakeQueryRunner.builder() diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHive3OnDataLake.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHive3OnDataLake.java index 558a45059a5..13b4b768200 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHive3OnDataLake.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHive3OnDataLake.java @@ -25,8 +25,8 @@ import io.trino.metastore.PartitionStatistics; import io.trino.metastore.PartitionWithStatistics; import io.trino.metastore.Table; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.plugin.hive.containers.HiveHadoop; -import io.trino.plugin.hive.containers.HiveMinioDataLake; import io.trino.plugin.hive.metastore.thrift.BridgingHiveMetastore; import io.trino.plugin.hive.s3.S3HiveQueryRunner; import io.trino.spi.connector.SchemaTableName; @@ -80,7 +80,7 @@ public class TestHive3OnDataLake private static final DataSize HIVE_S3_STREAMING_PART_SIZE = DataSize.of(5, MEGABYTE); private String bucketName; - private HiveMinioDataLake hiveMinioDataLake; + private Hive3MinioDataLake hiveMinioDataLake; private HiveMetastore metastoreClient; @Override @@ -89,7 +89,7 @@ protected QueryRunner createQueryRunner() { this.bucketName = "test-hive-insert-overwrite-" + randomNameSuffix(); this.hiveMinioDataLake = closeAfterClass( - new HiveMinioDataLake(bucketName, HiveHadoop.HIVE3_IMAGE)); + new Hive3MinioDataLake(bucketName, HiveHadoop.HIVE3_IMAGE)); this.hiveMinioDataLake.start(); this.metastoreClient = new BridgingHiveMetastore( testingThriftHiveMetastoreBuilder() diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveAnalyzeCorruptStatistics.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveAnalyzeCorruptStatistics.java index f986576683d..0f5fc357ea6 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveAnalyzeCorruptStatistics.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveAnalyzeCorruptStatistics.java @@ -14,7 +14,7 @@ package io.trino.plugin.hive; import io.airlift.units.Duration; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.plugin.hive.s3.S3HiveQueryRunner; import io.trino.testing.AbstractTestQueryFramework; import io.trino.testing.QueryRunner; @@ -28,13 +28,13 @@ public class TestHiveAnalyzeCorruptStatistics extends AbstractTestQueryFramework { - private 
HiveMinioDataLake hiveMinioDataLake; + private Hive3MinioDataLake hiveMinioDataLake; @Override protected QueryRunner createQueryRunner() throws Exception { - hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake("test-analyze")); + hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake("test-analyze")); hiveMinioDataLake.start(); return S3HiveQueryRunner.builder(hiveMinioDataLake) diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveCustomCatalogConnectorSmokeTest.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveCustomCatalogConnectorSmokeTest.java index 35433d3610d..24cb40038a9 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveCustomCatalogConnectorSmokeTest.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveCustomCatalogConnectorSmokeTest.java @@ -15,8 +15,8 @@ import io.trino.metastore.Database; import io.trino.metastore.HiveMetastore; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.plugin.hive.containers.HiveHadoop; -import io.trino.plugin.hive.containers.HiveMinioDataLake; import io.trino.plugin.hive.metastore.HiveMetastoreFactory; import io.trino.spi.security.PrincipalType; import io.trino.testing.BaseConnectorSmokeTest; @@ -49,7 +49,7 @@ protected QueryRunner createQueryRunner() throws Exception { String bucketName = "test-hive-metastore-catalog-smoke-test-" + randomNameSuffix(); - HiveMinioDataLake hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName, HiveHadoop.HIVE3_IMAGE)); + Hive3MinioDataLake hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake(bucketName, HiveHadoop.HIVE3_IMAGE)); hiveMinioDataLake.start(); // Inserting into metastore's database directly because the Hive does not expose a way to create a custom catalog diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveQueryFailureRecoveryTest.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveQueryFailureRecoveryTest.java index 54b05e04f83..fd36b81605e 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveQueryFailureRecoveryTest.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveQueryFailureRecoveryTest.java @@ -17,7 +17,7 @@ import io.trino.operator.RetryPolicy; import io.trino.plugin.exchange.filesystem.FileSystemExchangePlugin; import io.trino.plugin.exchange.filesystem.containers.MinioStorage; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.plugin.hive.s3.S3HiveQueryRunner; import io.trino.testing.QueryRunner; import io.trino.tpch.TpchTable; @@ -43,7 +43,7 @@ public TestHiveQueryFailureRecoveryTest() super(RetryPolicy.QUERY); } - private HiveMinioDataLake hiveMinioDataLake; + private Hive3MinioDataLake hiveMinioDataLake; private MinioStorage minioStorage; @Override @@ -55,7 +55,7 @@ protected QueryRunner createQueryRunner( throws Exception { String bucketName = "test-hive-insert-overwrite-" + randomNameSuffix(); // randomizing bucket name to ensure cached TrinoS3FileSystem objects are not reused - this.hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName)); + this.hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake(bucketName)); hiveMinioDataLake.start(); this.minioStorage = closeAfterClass(new MinioStorage("test-exchange-spooling-" + randomNameSuffix())); diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveTaskFailureRecoveryTest.java 
b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveTaskFailureRecoveryTest.java index a74f29d305d..cca5e7a009c 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveTaskFailureRecoveryTest.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveTaskFailureRecoveryTest.java @@ -17,7 +17,7 @@ import io.trino.operator.RetryPolicy; import io.trino.plugin.exchange.filesystem.FileSystemExchangePlugin; import io.trino.plugin.exchange.filesystem.containers.MinioStorage; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.plugin.hive.s3.S3HiveQueryRunner; import io.trino.testing.QueryRunner; import io.trino.tpch.TpchTable; @@ -43,7 +43,7 @@ public TestHiveTaskFailureRecoveryTest() super(RetryPolicy.TASK); } - private HiveMinioDataLake hiveMinioDataLake; + private Hive3MinioDataLake hiveMinioDataLake; private MinioStorage minioStorage; @Override @@ -55,7 +55,7 @@ protected QueryRunner createQueryRunner( throws Exception { String bucketName = "test-hive-insert-overwrite-" + randomNameSuffix(); // randomizing bucket name to ensure cached TrinoS3FileSystem objects are not reused - this.hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName)); + this.hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake(bucketName)); hiveMinioDataLake.start(); this.minioStorage = closeAfterClass(new MinioStorage("test-exchange-spooling-" + randomNameSuffix())); diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/HiveMinioDataLake.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive3MinioDataLake.java similarity index 94% rename from plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/HiveMinioDataLake.java rename to plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive3MinioDataLake.java index 0ce79c2ff6a..0ec3034e08b 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/HiveMinioDataLake.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive3MinioDataLake.java @@ -29,7 +29,7 @@ import static java.util.Objects.requireNonNull; import static org.testcontainers.containers.Network.newNetwork; -public class HiveMinioDataLake +public class Hive3MinioDataLake implements AutoCloseable { /** @@ -49,17 +49,17 @@ public class HiveMinioDataLake private State state = State.INITIAL; private MinioClient minioClient; - public HiveMinioDataLake(String bucketName) + public Hive3MinioDataLake(String bucketName) { this(bucketName, HiveHadoop.HIVE3_IMAGE); } - public HiveMinioDataLake(String bucketName, String hiveHadoopImage) + public Hive3MinioDataLake(String bucketName, String hiveHadoopImage) { this(bucketName, ImmutableMap.of("/etc/hadoop/conf/core-site.xml", getPathFromClassPathResource("hive_minio_datalake/hive-core-site.xml")), hiveHadoopImage); } - public HiveMinioDataLake(String bucketName, Map<String, String> hiveHadoopFilesToMount, String hiveHadoopImage) + public Hive3MinioDataLake(String bucketName, Map<String, String> hiveHadoopFilesToMount, String hiveHadoopImage) { this.bucketName = requireNonNull(bucketName, "bucketName is null"); network = closer.register(newNetwork()); diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestHiveMetastoreCatalogs.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestHiveMetastoreCatalogs.java index 1fa641e5195..cb6736f4b6b 100644 ---
a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestHiveMetastoreCatalogs.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestHiveMetastoreCatalogs.java @@ -18,8 +18,8 @@ import io.trino.metastore.Database; import io.trino.metastore.HiveMetastore; import io.trino.plugin.hive.HiveQueryRunner; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.plugin.hive.containers.HiveHadoop; -import io.trino.plugin.hive.containers.HiveMinioDataLake; import io.trino.plugin.hive.metastore.HiveMetastoreFactory; import io.trino.spi.security.PrincipalType; import io.trino.testing.AbstractTestQueryFramework; @@ -52,7 +52,7 @@ protected QueryRunner createQueryRunner() throws Exception { this.bucketName = "test-hive-metastore-catalogs-" + randomNameSuffix(); - HiveMinioDataLake hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName, HiveHadoop.HIVE3_IMAGE)); + Hive3MinioDataLake hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake(bucketName, HiveHadoop.HIVE3_IMAGE)); hiveMinioDataLake.start(); QueryRunner queryRunner = HiveQueryRunner.builder() @@ -75,7 +75,7 @@ protected QueryRunner createQueryRunner() return queryRunner; } - private static Map<String, String> buildHiveProperties(HiveMinioDataLake hiveMinioDataLake) + private static Map<String, String> buildHiveProperties(Hive3MinioDataLake hiveMinioDataLake) { return ImmutableMap.<String, String>builder() .put("hive.metastore", "thrift") diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/S3HiveQueryRunner.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/S3HiveQueryRunner.java index 9df3a85934b..450acfda9c5 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/S3HiveQueryRunner.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/S3HiveQueryRunner.java @@ -19,7 +19,7 @@ import io.airlift.log.Logging; import io.airlift.units.Duration; import io.trino.plugin.hive.HiveQueryRunner; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.plugin.hive.metastore.thrift.BridgingHiveMetastore; import io.trino.plugin.hive.metastore.thrift.TestingTokenAwareMetastoreClientFactory; import io.trino.plugin.hive.metastore.thrift.ThriftMetastoreConfig; @@ -47,7 +47,7 @@ public final class S3HiveQueryRunner private S3HiveQueryRunner() {} public static QueryRunner create( - HiveMinioDataLake hiveMinioDataLake, + Hive3MinioDataLake hiveMinioDataLake, Map<String, String> additionalHiveProperties) throws Exception { @@ -56,7 +56,7 @@ public static QueryRunner create( .build(); } - public static Builder builder(HiveMinioDataLake hiveMinioDataLake) + public static Builder builder(Hive3MinioDataLake hiveMinioDataLake) { return builder() .setHiveMetastoreEndpoint(hiveMinioDataLake.getHiveHadoop().getHiveMetastoreEndpoint()) @@ -173,7 +173,7 @@ public DistributedQueryRunner build() public static void main(String[] args) throws Exception { - HiveMinioDataLake hiveMinioDataLake = new HiveMinioDataLake("tpch"); + Hive3MinioDataLake hiveMinioDataLake = new Hive3MinioDataLake("tpch"); hiveMinioDataLake.start(); QueryRunner queryRunner = S3HiveQueryRunner.builder(hiveMinioDataLake) diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/TestHiveS3MinioQueries.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/TestHiveS3MinioQueries.java index 8c0fbaf5b09..8c8d6c172a1 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/TestHiveS3MinioQueries.java +++ 
b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/TestHiveS3MinioQueries.java @@ -14,7 +14,7 @@ package io.trino.plugin.hive.s3; import com.google.common.collect.ImmutableMap; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.testing.AbstractTestQueryFramework; import io.trino.testing.QueryRunner; import org.junit.jupiter.api.Test; @@ -31,7 +31,7 @@ public class TestHiveS3MinioQueries extends AbstractTestQueryFramework { - private HiveMinioDataLake hiveMinioDataLake; + private Hive3MinioDataLake hiveMinioDataLake; private String bucketName; @Override @@ -39,7 +39,7 @@ protected QueryRunner createQueryRunner() throws Exception { this.bucketName = "test-hive-minio-queries-" + randomNameSuffix(); - this.hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName)); + this.hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake(bucketName)); this.hiveMinioDataLake.start(); return S3HiveQueryRunner.builder(hiveMinioDataLake) diff --git a/plugin/trino-hudi/src/test/java/io/trino/plugin/hudi/HudiQueryRunner.java b/plugin/trino-hudi/src/test/java/io/trino/plugin/hudi/HudiQueryRunner.java index 8f817c21fab..915ca2b9ead 100644 --- a/plugin/trino-hudi/src/test/java/io/trino/plugin/hudi/HudiQueryRunner.java +++ b/plugin/trino-hudi/src/test/java/io/trino/plugin/hudi/HudiQueryRunner.java @@ -20,7 +20,7 @@ import io.trino.filesystem.Location; import io.trino.metastore.Database; import io.trino.plugin.base.util.Closables; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.plugin.hive.metastore.HiveMetastoreFactory; import io.trino.plugin.hudi.testing.HudiTablesInitializer; import io.trino.plugin.hudi.testing.ResourceHudiTablesInitializer; @@ -56,7 +56,7 @@ public static Builder builder() return new Builder("local:///"); } - public static Builder builder(HiveMinioDataLake hiveMinioDataLake) + public static Builder builder(Hive3MinioDataLake hiveMinioDataLake) { return new Builder("s3://" + hiveMinioDataLake.getBucketName() + "/") .addConnectorProperty("fs.hadoop.enabled", "false") @@ -157,7 +157,7 @@ public static void main(String[] args) Logging.initialize(); Logger log = Logger.get(HudiMinioQueryRunnerMain.class); - HiveMinioDataLake hiveMinioDataLake = new HiveMinioDataLake("test-bucket"); + Hive3MinioDataLake hiveMinioDataLake = new Hive3MinioDataLake("test-bucket"); hiveMinioDataLake.start(); QueryRunner queryRunner = builder(hiveMinioDataLake) .addCoordinatorProperty("http-server.http.port", "8080") diff --git a/plugin/trino-hudi/src/test/java/io/trino/plugin/hudi/TestHudiMinioConnectorSmokeTest.java b/plugin/trino-hudi/src/test/java/io/trino/plugin/hudi/TestHudiMinioConnectorSmokeTest.java index 069fb05c052..5fa1332073c 100644 --- a/plugin/trino-hudi/src/test/java/io/trino/plugin/hudi/TestHudiMinioConnectorSmokeTest.java +++ b/plugin/trino-hudi/src/test/java/io/trino/plugin/hudi/TestHudiMinioConnectorSmokeTest.java @@ -13,7 +13,7 @@ */ package io.trino.plugin.hudi; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.plugin.hudi.testing.TpchHudiTablesInitializer; import io.trino.testing.QueryRunner; @@ -29,7 +29,7 @@ protected QueryRunner createQueryRunner() throws Exception { String bucketName = "test-hudi-connector-" + randomNameSuffix(); - HiveMinioDataLake hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName, 
HIVE3_IMAGE)); + Hive3MinioDataLake hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake(bucketName, HIVE3_IMAGE)); hiveMinioDataLake.start(); hiveMinioDataLake.getMinioClient().ensureBucketExists(bucketName); diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergMinioConnectorSmokeTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergMinioConnectorSmokeTest.java index 4fdeb620b74..03aa104bd33 100644 --- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergMinioConnectorSmokeTest.java +++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergMinioConnectorSmokeTest.java @@ -17,7 +17,7 @@ import io.minio.messages.Event; import io.trino.Session; import io.trino.metastore.HiveMetastore; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.plugin.hive.metastore.thrift.BridgingHiveMetastore; import io.trino.testing.QueryRunner; import io.trino.testing.minio.MinioClient; @@ -50,7 +50,7 @@ public abstract class BaseIcebergMinioConnectorSmokeTest private final String schemaName; private final String bucketName; - private HiveMinioDataLake hiveMinioDataLake; + private Hive3MinioDataLake hiveMinioDataLake; protected BaseIcebergMinioConnectorSmokeTest(FileFormat format) { @@ -63,7 +63,7 @@ protected BaseIcebergMinioConnectorSmokeTest(FileFormat format) protected QueryRunner createQueryRunner() throws Exception { - this.hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName)); + this.hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake(bucketName)); this.hiveMinioDataLake.start(); return IcebergQueryRunner.builder() diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/IcebergQueryRunner.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/IcebergQueryRunner.java index 698658916e7..ff7664af0ee 100644 --- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/IcebergQueryRunner.java +++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/IcebergQueryRunner.java @@ -21,8 +21,8 @@ import io.airlift.log.Logger; import io.airlift.log.Logging; import io.trino.plugin.exchange.filesystem.FileSystemExchangePlugin; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.plugin.hive.containers.HiveHadoop; -import io.trino.plugin.hive.containers.HiveMinioDataLake; import io.trino.plugin.iceberg.catalog.jdbc.TestingIcebergJdbcServer; import io.trino.plugin.iceberg.catalog.rest.TestingPolarisCatalog; import io.trino.plugin.iceberg.containers.NessieContainer; @@ -319,7 +319,7 @@ public static void main(String[] args) { String bucketName = "test-bucket"; @SuppressWarnings("resource") - HiveMinioDataLake hiveMinioDataLake = new HiveMinioDataLake(bucketName); + Hive3MinioDataLake hiveMinioDataLake = new Hive3MinioDataLake(bucketName); hiveMinioDataLake.start(); @SuppressWarnings("resource") diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHiveCatalogWithHiveMetastore.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHiveCatalogWithHiveMetastore.java index 587f63d2acc..56b1876bc77 100644 --- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHiveCatalogWithHiveMetastore.java +++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHiveCatalogWithHiveMetastore.java @@ -33,7 +33,7 @@ import io.trino.metastore.TableInfo; 
import io.trino.plugin.base.util.AutoCloseableCloser; import io.trino.plugin.hive.TrinoViewHiveMetastore; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.plugin.hive.metastore.cache.CachingHiveMetastore; import io.trino.plugin.hive.metastore.thrift.BridgingHiveMetastore; import io.trino.plugin.hive.metastore.thrift.ThriftMetastore; @@ -89,7 +89,7 @@ public class TestTrinoHiveCatalogWithHiveMetastore private AutoCloseableCloser closer = AutoCloseableCloser.create(); // Use MinIO for storage, since HDFS is hard to get working in a unit test - private HiveMinioDataLake dataLake; + private Hive3MinioDataLake dataLake; private TrinoFileSystem fileSystem; private String bucketName; @@ -97,7 +97,7 @@ public class TestTrinoHiveCatalogWithHiveMetastore public void setUp() { bucketName = "test-hive-catalog-with-hms-" + randomNameSuffix(); - dataLake = closer.register(new HiveMinioDataLake(bucketName, HIVE3_IMAGE)); + dataLake = closer.register(new Hive3MinioDataLake(bucketName, HIVE3_IMAGE)); dataLake.start(); } diff --git a/testing/trino-faulttolerant-tests/src/test/java/io/trino/faulttolerant/delta/TestDeltaFaultTolerantExecutionTest.java b/testing/trino-faulttolerant-tests/src/test/java/io/trino/faulttolerant/delta/TestDeltaFaultTolerantExecutionTest.java index 0aeee0ac734..bcffb9655fb 100644 --- a/testing/trino-faulttolerant-tests/src/test/java/io/trino/faulttolerant/delta/TestDeltaFaultTolerantExecutionTest.java +++ b/testing/trino-faulttolerant-tests/src/test/java/io/trino/faulttolerant/delta/TestDeltaFaultTolerantExecutionTest.java @@ -17,7 +17,7 @@ import io.trino.plugin.deltalake.DeltaLakeQueryRunner; import io.trino.plugin.exchange.filesystem.FileSystemExchangePlugin; import io.trino.plugin.exchange.filesystem.containers.MinioStorage; -import io.trino.plugin.hive.containers.HiveMinioDataLake; +import io.trino.plugin.hive.containers.Hive3MinioDataLake; import io.trino.testing.FaultTolerantExecutionConnectorTestHelper; import io.trino.testing.QueryRunner; @@ -38,7 +38,7 @@ public TestDeltaFaultTolerantExecutionTest() protected QueryRunner createQueryRunner() throws Exception { - HiveMinioDataLake hiveMinioDataLake = closeAfterClass(new HiveMinioDataLake(bucketName)); + Hive3MinioDataLake hiveMinioDataLake = closeAfterClass(new Hive3MinioDataLake(bucketName)); hiveMinioDataLake.start(); MinioStorage minioStorage = closeAfterClass(new MinioStorage(bucketName)); minioStorage.start(); From bb15f6af3b82a44f52dcd1360b17668a5488a344 Mon Sep 17 00:00:00 2001 From: Mayank Vadariya <48036907+mayankvadariya@users.noreply.github.com> Date: Thu, 12 Dec 2024 16:27:34 -0500 Subject: [PATCH 3/8] Extract HiveMinioDataLake class --- ...tDeltaLakeFlushMetadataCacheProcedure.java | 2 +- ...DeltaLakeSharedHiveMetastoreWithViews.java | 6 +- ...stHiveCustomCatalogConnectorSmokeTest.java | 2 +- .../hive/containers/Hive3MinioDataLake.java | 108 ++------------ .../hive/containers/HiveMinioDataLake.java | 135 ++++++++++++++++++ .../thrift/TestHiveMetastoreCatalogs.java | 2 +- .../plugin/hive/s3/S3HiveQueryRunner.java | 2 +- .../BaseIcebergMinioConnectorSmokeTest.java | 6 +- ...TestTrinoHiveCatalogWithHiveMetastore.java | 2 +- 9 files changed, 159 insertions(+), 106 deletions(-) create mode 100644 plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/HiveMinioDataLake.java diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeFlushMetadataCacheProcedure.java 
b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeFlushMetadataCacheProcedure.java index 68531e7e93c..615c6ab50f8 100644 --- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeFlushMetadataCacheProcedure.java +++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeFlushMetadataCacheProcedure.java @@ -43,7 +43,7 @@ protected QueryRunner createQueryRunner() hiveMinioDataLake.start(); metastore = new BridgingHiveMetastore( testingThriftHiveMetastoreBuilder() - .metastoreClient(hiveMinioDataLake.getHiveHadoop().getHiveMetastoreEndpoint()) + .metastoreClient(hiveMinioDataLake.getHiveMetastoreEndpoint()) .build(this::closeAfterClass)); return DeltaLakeQueryRunner.builder("default") diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeSharedHiveMetastoreWithViews.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeSharedHiveMetastoreWithViews.java index 31b8cd778d8..bcd9da3bd21 100644 --- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeSharedHiveMetastoreWithViews.java +++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeSharedHiveMetastoreWithViews.java @@ -54,7 +54,7 @@ protected QueryRunner createQueryRunner() queryRunner.installPlugin(new TestingHivePlugin(queryRunner.getCoordinator().getBaseDataDir().resolve("hive_data"))); queryRunner.createCatalog("hive", "hive", ImmutableMap.<String, String>builder() .put("hive.metastore", "thrift") - .put("hive.metastore.uri", hiveMinioDataLake.getHiveHadoop().getHiveMetastoreEndpoint().toString()) + .put("hive.metastore.uri", hiveMinioDataLake.getHiveMetastoreEndpoint().toString()) .put("fs.hadoop.enabled", "false") .put("fs.native-s3.enabled", "true") .put("s3.aws-access-key", MINIO_ACCESS_KEY) @@ -67,7 +67,7 @@ protected QueryRunner createQueryRunner() schema = queryRunner.getDefaultSession().getSchema().orElseThrow(); queryRunner.execute("CREATE TABLE hive." + schema + ".hive_table (a_integer integer)"); - hiveMinioDataLake.getHiveHadoop().runOnHive("CREATE VIEW " + schema + ".hive_view AS SELECT * FROM " + schema + ".hive_table"); + hiveMinioDataLake.runOnHive("CREATE VIEW " + schema + ".hive_view AS SELECT * FROM " + schema + ".hive_table"); queryRunner.execute("CREATE TABLE delta." + schema + ".delta_table (a_varchar varchar)"); return queryRunner; @@ -82,7 +82,7 @@ protected QueryRunner createQueryRunner() public void cleanup() { assertQuerySucceeds("DROP TABLE IF EXISTS hive." + schema + ".hive_table"); - hiveMinioDataLake.getHiveHadoop().runOnHive("DROP VIEW IF EXISTS " + schema + ".hive_view"); + hiveMinioDataLake.runOnHive("DROP VIEW IF EXISTS " + schema + ".hive_view"); assertQuerySucceeds("DROP TABLE IF EXISTS delta." + schema + ".delta_table"); assertQuerySucceeds("DROP SCHEMA IF EXISTS hive." 
+ schema); } diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveCustomCatalogConnectorSmokeTest.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveCustomCatalogConnectorSmokeTest.java index 24cb40038a9..6d55f9cd915 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveCustomCatalogConnectorSmokeTest.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHiveCustomCatalogConnectorSmokeTest.java @@ -57,7 +57,7 @@ protected QueryRunner createQueryRunner() QueryRunner queryRunner = HiveQueryRunner.builder() .addHiveProperty("hive.metastore", "thrift") - .addHiveProperty("hive.metastore.uri", hiveMinioDataLake.getHiveHadoop().getHiveMetastoreEndpoint().toString()) + .addHiveProperty("hive.metastore.uri", hiveMinioDataLake.getHiveMetastoreEndpoint().toString()) .addHiveProperty("hive.metastore.thrift.catalog-name", HIVE_CUSTOM_CATALOG) .addHiveProperty("fs.hadoop.enabled", "false") .addHiveProperty("fs.native-s3.enabled", "true") diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive3MinioDataLake.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive3MinioDataLake.java index 0ec3034e08b..840e18d55bf 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive3MinioDataLake.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive3MinioDataLake.java @@ -14,41 +14,18 @@ package io.trino.plugin.hive.containers; import com.google.common.collect.ImmutableMap; -import io.trino.plugin.base.util.AutoCloseableCloser; -import io.trino.testing.containers.Minio; -import io.trino.testing.minio.MinioClient; -import org.testcontainers.containers.Network; -import java.util.List; +import java.net.URI; import java.util.Map; -import static com.google.common.base.Preconditions.checkState; -import static io.trino.testing.containers.Minio.MINIO_ACCESS_KEY; -import static io.trino.testing.containers.Minio.MINIO_SECRET_KEY; +import static io.trino.plugin.hive.containers.HiveMinioDataLake.State.STARTED; import static io.trino.testing.containers.TestContainers.getPathFromClassPathResource; -import static java.util.Objects.requireNonNull; -import static org.testcontainers.containers.Network.newNetwork; public class Hive3MinioDataLake - implements AutoCloseable + extends HiveMinioDataLake { - /** - * In S3 this region is implicitly the default one. In Minio, however, - * if we set an empty region, it will accept any. 
- * So setting it by default to `us-east-1` simulates S3 better - */ - public static final String MINIO_DEFAULT_REGION = "us-east-1"; - - private final String bucketName; - private final Minio minio; private final HiveHadoop hiveHadoop; - private final AutoCloseableCloser closer = AutoCloseableCloser.create(); - private final Network network; - - private State state = State.INITIAL; - private MinioClient minioClient; - public Hive3MinioDataLake(String bucketName) { this(bucketName, HiveHadoop.HIVE3_IMAGE); @@ -61,18 +38,7 @@ public Hive3MinioDataLake(String bucketName, String hiveHadoopImage) public Hive3MinioDataLake(String bucketName, Map<String, String> hiveHadoopFilesToMount, String hiveHadoopImage) { - this.bucketName = requireNonNull(bucketName, "bucketName is null"); - network = closer.register(newNetwork()); - this.minio = closer.register( - Minio.builder() - .withNetwork(network) - .withEnvVars(ImmutableMap.<String, String>builder() - .put("MINIO_ACCESS_KEY", MINIO_ACCESS_KEY) - .put("MINIO_SECRET_KEY", MINIO_SECRET_KEY) - .put("MINIO_REGION", MINIO_DEFAULT_REGION) - .buildOrThrow()) - .build()); - + super(bucketName); HiveHadoop.Builder hiveHadoopBuilder = HiveHadoop.builder() .withImage(hiveHadoopImage) .withNetwork(network) @@ -80,77 +46,29 @@ public Hive3MinioDataLake(String bucketName, Map<String, String> hiveHadoopFiles this.hiveHadoop = closer.register(hiveHadoopBuilder.build()); } + @Override public void start() { - checkState(state == State.INITIAL, "Already started: %s", state); - state = State.STARTING; - minio.start(); + super.start(); hiveHadoop.start(); - minioClient = closer.register(minio.createMinioClient()); - minio.createBucket(bucketName); - state = State.STARTED; - } - - public void stop() - throws Exception - { - closer.close(); - state = State.STOPPED; + state = STARTED; } - public Network getNetwork() - { - return network; - } - - public MinioClient getMinioClient() - { - checkState(state == State.STARTED, "Can't provide client when MinIO state is: %s", state); - return minioClient; - } - - public void copyResources(String resourcePath, String target) - { - minio.copyResources(resourcePath, bucketName, target); - } - - public void writeFile(byte[] contents, String target) - { - minio.writeFile(contents, bucketName, target); - } - - public List<String> listFiles(String targetDirectory) - { - return getMinioClient().listObjects(getBucketName(), targetDirectory); - } - - public Minio getMinio() + @Override + public String runOnHive(String sql) { - return minio; + return hiveHadoop.runOnHive(sql); } + @Override public HiveHadoop getHiveHadoop() { return hiveHadoop; } - public String getBucketName() - { - return bucketName; - } - @Override - public void close() - throws Exception - { - stop(); - } - - private enum State + public URI getHiveMetastoreEndpoint() { - INITIAL, - STARTING, - STARTED, - STOPPED, + return hiveHadoop.getHiveMetastoreEndpoint(); } } diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/HiveMinioDataLake.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/HiveMinioDataLake.java new file mode 100644 index 00000000000..19bbc35d32a --- /dev/null +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/HiveMinioDataLake.java @@ -0,0 +1,135 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.trino.plugin.hive.containers; + +import com.google.common.collect.ImmutableMap; +import io.trino.plugin.base.util.AutoCloseableCloser; +import io.trino.testing.containers.Minio; +import io.trino.testing.minio.MinioClient; +import org.testcontainers.containers.Network; + +import java.net.URI; +import java.util.List; + +import static com.google.common.base.Preconditions.checkState; +import static io.trino.plugin.hive.containers.HiveMinioDataLake.State.INITIAL; +import static io.trino.plugin.hive.containers.HiveMinioDataLake.State.STARTED; +import static io.trino.plugin.hive.containers.HiveMinioDataLake.State.STARTING; +import static io.trino.plugin.hive.containers.HiveMinioDataLake.State.STOPPED; +import static io.trino.testing.containers.Minio.MINIO_ACCESS_KEY; +import static io.trino.testing.containers.Minio.MINIO_SECRET_KEY; +import static java.util.Objects.requireNonNull; +import static org.testcontainers.containers.Network.newNetwork; + +public abstract class HiveMinioDataLake + implements AutoCloseable +{ + private static final String MINIO_DEFAULT_REGION = "us-east-1"; + + private final String bucketName; + private final Minio minio; + private MinioClient minioClient; + + protected final AutoCloseableCloser closer = AutoCloseableCloser.create(); + protected final Network network; + protected State state = INITIAL; + + public HiveMinioDataLake(String bucketName) + { + this.bucketName = requireNonNull(bucketName, "bucketName is null"); + this.network = closer.register(newNetwork()); + this.minio = closer.register( + Minio.builder() + .withNetwork(network) + .withEnvVars(ImmutableMap.<String, String>builder() + .put("MINIO_ACCESS_KEY", MINIO_ACCESS_KEY) + .put("MINIO_SECRET_KEY", MINIO_SECRET_KEY) + .put("MINIO_REGION", MINIO_DEFAULT_REGION) + .buildOrThrow()) + .build()); + } + + public void start() + { + checkState(state == INITIAL, "Already started: %s", state); + state = STARTING; + minio.start(); + minioClient = closer.register(minio.createMinioClient()); + minio.createBucket(bucketName); + } + + public void stop() + throws Exception + { + closer.close(); + state = STOPPED; + } + + public Network getNetwork() + { + return network; + } + + public MinioClient getMinioClient() + { + checkState(state == STARTED, "Can't provide client when MinIO state is: %s", state); + return minioClient; + } + + public void copyResources(String resourcePath, String target) + { + minio.copyResources(resourcePath, bucketName, target); + } + + public void writeFile(byte[] contents, String target) + { + minio.writeFile(contents, bucketName, target); + } + + public List<String> listFiles(String targetDirectory) + { + return getMinioClient().listObjects(getBucketName(), targetDirectory); + } + + public Minio getMinio() + { + return minio; + } + + public abstract String runOnHive(String sql); + + public abstract HiveHadoop getHiveHadoop(); + + public abstract URI getHiveMetastoreEndpoint(); + + public String getBucketName() + { + return bucketName; + } + + @Override + public void close() + throws Exception + { + stop(); + } + + protected enum State + { + INITIAL, + STARTING, + STARTED, + STOPPED, 
}
+} diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestHiveMetastoreCatalogs.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestHiveMetastoreCatalogs.java index cb6736f4b6b..23a18de4e7e 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestHiveMetastoreCatalogs.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/metastore/thrift/TestHiveMetastoreCatalogs.java @@ -79,7 +79,7 @@ private static Map<String, String> buildHiveProperties(Hive3MinioDataLake hiveMi { return ImmutableMap.<String, String>builder() .put("hive.metastore", "thrift") - .put("hive.metastore.uri", hiveMinioDataLake.getHiveHadoop().getHiveMetastoreEndpoint().toString()) + .put("hive.metastore.uri", hiveMinioDataLake.getHiveMetastoreEndpoint().toString()) .put("fs.hadoop.enabled", "false") .put("fs.native-s3.enabled", "true") .put("s3.path-style-access", "true") diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/S3HiveQueryRunner.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/S3HiveQueryRunner.java index 450acfda9c5..17724fc599a 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/S3HiveQueryRunner.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/S3HiveQueryRunner.java @@ -59,7 +59,7 @@ public static Builder builder(Hive3MinioDataLake hiveMinioDataLake) { return builder() - .setHiveMetastoreEndpoint(hiveMinioDataLake.getHiveHadoop().getHiveMetastoreEndpoint()) + .setHiveMetastoreEndpoint(hiveMinioDataLake.getHiveMetastoreEndpoint()) .setS3Endpoint("http://" + hiveMinioDataLake.getMinio().getMinioApiEndpoint()) .setS3Region(MINIO_REGION) .setS3AccessKey(MINIO_ACCESS_KEY) diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergMinioConnectorSmokeTest.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergMinioConnectorSmokeTest.java index 03aa104bd33..3144bd56bb0 100644 --- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergMinioConnectorSmokeTest.java +++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/BaseIcebergMinioConnectorSmokeTest.java @@ -71,7 +71,7 @@ protected QueryRunner createQueryRunner() ImmutableMap.<String, String>builder() .put("iceberg.file-format", format.name()) .put("iceberg.catalog.type", "HIVE_METASTORE") - .put("hive.metastore.uri", hiveMinioDataLake.getHiveHadoop().getHiveMetastoreEndpoint().toString()) + .put("hive.metastore.uri", hiveMinioDataLake.getHiveMetastoreEndpoint().toString()) .put("hive.metastore.thrift.client.read-timeout", "1m") // read timed out sometimes happens with the default timeout .put("fs.hadoop.enabled", "false") .put("fs.native-s3.enabled", "true") @@ -258,7 +258,7 @@ protected void dropTableFromMetastore(String tableName) { HiveMetastore metastore = new BridgingHiveMetastore( testingThriftHiveMetastoreBuilder() - .metastoreClient(hiveMinioDataLake.getHiveHadoop().getHiveMetastoreEndpoint()) + .metastoreClient(hiveMinioDataLake.getHiveMetastoreEndpoint()) .build(this::closeAfterClass)); metastore.dropTable(schemaName, tableName, false); assertThat(metastore.getTable(schemaName, tableName)).isEmpty(); @@ -269,7 +269,7 @@ protected String getMetadataLocation(String tableName) { HiveMetastore metastore = new BridgingHiveMetastore( testingThriftHiveMetastoreBuilder() - .metastoreClient(hiveMinioDataLake.getHiveHadoop().getHiveMetastoreEndpoint()) + .metastoreClient(hiveMinioDataLake.getHiveMetastoreEndpoint()) 
.build(this::closeAfterClass)); return metastore .getTable(schemaName, tableName).orElseThrow() diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHiveCatalogWithHiveMetastore.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHiveCatalogWithHiveMetastore.java index 56b1876bc77..3085162c477 100644 --- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHiveCatalogWithHiveMetastore.java +++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHiveCatalogWithHiveMetastore.java @@ -130,7 +130,7 @@ protected TrinoCatalog createTrinoCatalog(boolean useUniqueTableLocations) .thriftMetastoreConfig(new ThriftMetastoreConfig() // Read timed out sometimes happens with the default timeout .setReadTimeout(new Duration(1, MINUTES))) - .metastoreClient(dataLake.getHiveHadoop().getHiveMetastoreEndpoint()) + .metastoreClient(dataLake.getHiveMetastoreEndpoint()) .build(closer::register); CachingHiveMetastore metastore = createPerTransactionCache(new BridgingHiveMetastore(thriftMetastore), 1000); fileSystem = fileSystemFactory.create(SESSION); From ea23b1a4b7a119db814d5b523a08f001ef7128f4 Mon Sep 17 00:00:00 2001 From: Mayank Vadariya <48036907+mayankvadariya@users.noreply.github.com> Date: Mon, 9 Dec 2024 18:58:04 -0500 Subject: [PATCH 4/8] Extract BaseTestHiveOnDataLake to reuse it across Hive3/4 test --- .../{TestHive3OnDataLake.java => BaseTestHiveOnDataLake.java} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename plugin/trino-hive/src/test/java/io/trino/plugin/hive/{TestHive3OnDataLake.java => BaseTestHiveOnDataLake.java} (99%) diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHive3OnDataLake.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseTestHiveOnDataLake.java similarity index 99% rename from plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHive3OnDataLake.java rename to plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseTestHiveOnDataLake.java index 13b4b768200..0874e9ee7a1 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHive3OnDataLake.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseTestHiveOnDataLake.java @@ -73,7 +73,7 @@ import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS; @TestInstance(PER_CLASS) -public class TestHive3OnDataLake +public class BaseTestHiveOnDataLake extends AbstractTestQueryFramework { private static final String HIVE_TEST_SCHEMA = "hive_datalake"; From 383cc474b45b3ff2e7b2a200aba6ae9826422248 Mon Sep 17 00:00:00 2001 From: Mayank Vadariya <48036907+mayankvadariya@users.noreply.github.com> Date: Thu, 12 Dec 2024 16:51:24 -0500 Subject: [PATCH 5/8] Add TestHive3OnDataLake test --- .../plugin/hive/BaseTestHiveOnDataLake.java | 75 +++++++++++-------- .../plugin/hive/TestHive3OnDataLake.java | 33 ++++++++ .../plugin/hive/s3/S3HiveQueryRunner.java | 3 +- 3 files changed, 78 insertions(+), 33 deletions(-) create mode 100644 plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHive3OnDataLake.java diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseTestHiveOnDataLake.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseTestHiveOnDataLake.java index 0874e9ee7a1..59df61051c6 100644 --- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseTestHiveOnDataLake.java +++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseTestHiveOnDataLake.java @@ -25,8 +25,7 @@ import 

From 383cc474b45b3ff2e7b2a200aba6ae9826422248 Mon Sep 17 00:00:00 2001
From: Mayank Vadariya <48036907+mayankvadariya@users.noreply.github.com>
Date: Thu, 12 Dec 2024 16:51:24 -0500
Subject: [PATCH 5/8] Add TestHive3OnDataLake test

---
 .../plugin/hive/BaseTestHiveOnDataLake.java   | 75 +++++++++++--------
 .../plugin/hive/TestHive3OnDataLake.java      | 33 ++++++++
 .../plugin/hive/s3/S3HiveQueryRunner.java     |  3 +-
 3 files changed, 78 insertions(+), 33 deletions(-)
 create mode 100644 plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHive3OnDataLake.java

diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseTestHiveOnDataLake.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseTestHiveOnDataLake.java
index 0874e9ee7a1..59df61051c6 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseTestHiveOnDataLake.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseTestHiveOnDataLake.java
@@ -25,8 +25,7 @@
 import io.trino.metastore.PartitionStatistics;
 import io.trino.metastore.PartitionWithStatistics;
 import io.trino.metastore.Table;
-import io.trino.plugin.hive.containers.Hive3MinioDataLake;
-import io.trino.plugin.hive.containers.HiveHadoop;
+import io.trino.plugin.hive.containers.HiveMinioDataLake;
 import io.trino.plugin.hive.metastore.thrift.BridgingHiveMetastore;
 import io.trino.plugin.hive.s3.S3HiveQueryRunner;
 import io.trino.spi.connector.SchemaTableName;
@@ -36,6 +35,7 @@
 import io.trino.testing.AbstractTestQueryFramework;
 import io.trino.testing.QueryRunner;
 import io.trino.testing.minio.MinioClient;
+import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.TestInstance;
@@ -73,27 +73,31 @@
 import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;
 
 @TestInstance(PER_CLASS)
-public class BaseTestHiveOnDataLake
+abstract class BaseTestHiveOnDataLake
         extends AbstractTestQueryFramework
 {
     private static final String HIVE_TEST_SCHEMA = "hive_datalake";
     private static final DataSize HIVE_S3_STREAMING_PART_SIZE = DataSize.of(5, MEGABYTE);
 
-    private String bucketName;
-    private Hive3MinioDataLake hiveMinioDataLake;
+    private final HiveMinioDataLake hiveMinioDataLake;
+    private final String bucketName;
+
     private HiveMetastore metastoreClient;
 
+    public BaseTestHiveOnDataLake(String bucketName, HiveMinioDataLake hiveMinioDataLake)
+    {
+        this.bucketName = bucketName;
+        this.hiveMinioDataLake = hiveMinioDataLake;
+    }
+
     @Override
     protected QueryRunner createQueryRunner()
             throws Exception
     {
-        this.bucketName = "test-hive-insert-overwrite-" + randomNameSuffix();
-        this.hiveMinioDataLake = closeAfterClass(
-                new Hive3MinioDataLake(bucketName, HiveHadoop.HIVE3_IMAGE));
         this.hiveMinioDataLake.start();
         this.metastoreClient = new BridgingHiveMetastore(
                 testingThriftHiveMetastoreBuilder()
-                        .metastoreClient(this.hiveMinioDataLake.getHiveHadoop().getHiveMetastoreEndpoint())
+                        .metastoreClient(hiveMinioDataLake.getHiveMetastoreEndpoint())
                         .build(this::closeAfterClass));
         return S3HiveQueryRunner.builder(hiveMinioDataLake)
                 .addExtraProperty("sql.path", "hive.functions")
@@ -124,6 +128,13 @@ public void setUp()
         computeActual("CREATE SCHEMA hive.functions");
     }
 
+    @AfterAll
+    public void destroy()
+            throws Exception
+    {
+        hiveMinioDataLake.close();
+    }
+
     @Test
     public void testInsertOverwriteInTransaction()
     {
@@ -456,7 +467,7 @@ public void testEnumPartitionProjectionOnVarcharColumnWithWhitespace()
                 ")");
 
         assertThat(
-                hiveMinioDataLake.getHiveHadoop()
+                hiveMinioDataLake
                         .runOnHive("SHOW TBLPROPERTIES " + getHiveTestTableName(tableName)))
                 .containsPattern("[ |]+projection\\.enabled[ |]+true[ |]+")
                 .containsPattern("[ |]+projection\\.short name\\.type[ |]+enum[ |]+")
@@ -533,7 +544,7 @@ public void testEnumPartitionProjectionOnVarcharColumnWithStorageLocationTemplat
                 " partition_projection_location_template='" + storageFormat + "' " +
                 ")");
         assertThat(
-                hiveMinioDataLake.getHiveHadoop()
+                hiveMinioDataLake
                         .runOnHive("SHOW TBLPROPERTIES " + getHiveTestTableName(schemaName, tableName)))
                 .containsPattern("[ |]+projection\\.enabled[ |]+true[ |]+")
                 .containsPattern("[ |]+storage\\.location\\.template[ |]+" + quote(storageFormat) + "[ |]+")
@@ -553,7 +564,7 @@ public void testEnumPartitionProjectionOnVarcharColumnWithStorageLocationTemplat
                 this.bucketName,
                 HIVE_TEST_SCHEMA,
                 tableName);
-        hiveMinioDataLake.getHiveHadoop().runOnHive(
+        hiveMinioDataLake.runOnHive(
                 "CREATE TABLE " + getHiveTestTableName(tableName) + " ( " +
                         " name varchar(25), " +
                         " comment varchar(152), " +
@@ -628,7 +639,7 @@ public void testEnumPartitionProjectionOnVarcharColumn()
                 ")");
 
         assertThat(
-                hiveMinioDataLake.getHiveHadoop()
+                hiveMinioDataLake
                         .runOnHive("SHOW TBLPROPERTIES " + getHiveTestTableName(tableName)))
                 .containsPattern("[ |]+projection\\.enabled[ |]+true[ |]+")
                 .containsPattern("[ |]+projection\\.short_name1\\.type[ |]+enum[ |]+")
@@ -685,7 +696,7 @@ public void testIntegerPartitionProjectionOnVarcharColumnWithDigitsAlignCreatedO
                 " partition_projection_enabled=true " +
                 ")");
         assertThat(
-                hiveMinioDataLake.getHiveHadoop()
+                hiveMinioDataLake
                         .runOnHive("SHOW TBLPROPERTIES " + getHiveTestTableName(tableName)))
                 .containsPattern("[ |]+projection\\.enabled[ |]+true[ |]+")
                 .containsPattern("[ |]+projection\\.short_name1\\.type[ |]+enum[ |]+")
@@ -700,7 +711,7 @@ public void testIntegerPartitionProjectionOnVarcharColumnWithDigitsAlignCreatedO
     public void testIntegerPartitionProjectionOnVarcharColumnWithDigitsAlignCreatedOnHive()
     {
         String tableName = "nation_" + randomNameSuffix();
-        hiveMinioDataLake.getHiveHadoop().runOnHive(
+        hiveMinioDataLake.runOnHive(
                 "CREATE TABLE " + getHiveTestTableName(tableName) + " ( " +
                         " name varchar(25), " +
                         " comment varchar(152), " +
@@ -783,7 +794,7 @@ public void testIntegerPartitionProjectionOnIntegerColumnWithInterval()
                 ")");
 
         assertThat(
-                hiveMinioDataLake.getHiveHadoop()
+                hiveMinioDataLake
                         .runOnHive("SHOW TBLPROPERTIES " + getHiveTestTableName(tableName)))
                 .containsPattern("[ |]+projection\\.enabled[ |]+true[ |]+")
                 .containsPattern("[ |]+projection\\.short_name1\\.type[ |]+enum[ |]+")
@@ -843,7 +854,7 @@ public void testIntegerPartitionProjectionOnIntegerColumnWithDefaults()
                 ")");
 
         assertThat(
-                hiveMinioDataLake.getHiveHadoop()
+                hiveMinioDataLake
                         .runOnHive("SHOW TBLPROPERTIES " + getHiveTestTableName(tableName)))
                 .containsPattern("[ |]+projection\\.enabled[ |]+true[ |]+")
                 .containsPattern("[ |]+projection\\.short_name1\\.type[ |]+enum[ |]+")
@@ -903,7 +914,7 @@ public void testDatePartitionProjectionOnDateColumnWithDefaults()
                 ")");
 
         assertThat(
-                hiveMinioDataLake.getHiveHadoop()
+                hiveMinioDataLake
                         .runOnHive("SHOW TBLPROPERTIES " + getHiveTestTableName(tableName)))
                 .containsPattern("[ |]+projection\\.enabled[ |]+true[ |]+")
                 .containsPattern("[ |]+projection\\.short_name1\\.type[ |]+enum[ |]+")
@@ -980,7 +991,7 @@ public void testDatePartitionProjectionOnTimestampColumnWithInterval()
                 ")");
 
         assertThat(
-                hiveMinioDataLake.getHiveHadoop()
+                hiveMinioDataLake
                         .runOnHive("SHOW TBLPROPERTIES " + getHiveTestTableName(tableName)))
                 .containsPattern("[ |]+projection\\.enabled[ |]+true[ |]+")
                 .containsPattern("[ |]+projection\\.short_name1\\.type[ |]+enum[ |]+")
@@ -1054,7 +1065,7 @@ public void testDatePartitionProjectionOnTimestampColumnWithIntervalExpressionCr
                 " partition_projection_enabled=true " +
                 ")");
         assertThat(
-                hiveMinioDataLake.getHiveHadoop()
+                hiveMinioDataLake
                         .runOnHive("SHOW TBLPROPERTIES " + getHiveTestTableName(tableName)))
                 .containsPattern("[ |]+projection\\.enabled[ |]+true[ |]+")
                 .containsPattern("[ |]+projection\\.short_name1\\.type[ |]+enum[ |]+")
@@ -1071,7 +1082,7 @@ public void testDatePartitionProjectionOnTimestampColumnWithIntervalExpressionCr
     {
         String tableName = getRandomTestTableName();
         String dateProjectionFormat = "yyyy-MM-dd HH:mm:ss";
-        hiveMinioDataLake.getHiveHadoop().runOnHive(
+        hiveMinioDataLake.runOnHive(
                 "CREATE TABLE " + getHiveTestTableName(tableName) + " ( " +
                         " name varchar(25), " +
                         " comment varchar(152), " +
@@ -1153,7 +1164,7 @@ public void testDatePartitionProjectionOnVarcharColumnWithHoursInterval()
                 ")");
 
         assertThat(
-                hiveMinioDataLake.getHiveHadoop()
+                hiveMinioDataLake
                         .runOnHive("SHOW TBLPROPERTIES " + getHiveTestTableName(tableName)))
                 .containsPattern("[ |]+projection\\.enabled[ |]+true[ |]+")
                 .containsPattern("[ |]+projection\\.short_name1\\.type[ |]+enum[ |]+")
@@ -1227,7 +1238,7 @@ public void testDatePartitionProjectionOnVarcharColumnWithDaysInterval()
                 ")");
 
         assertThat(
-                hiveMinioDataLake.getHiveHadoop()
+                hiveMinioDataLake
                         .runOnHive("SHOW TBLPROPERTIES " + getHiveTestTableName(tableName)))
                 .containsPattern("[ |]+projection\\.enabled[ |]+true[ |]+")
                 .containsPattern("[ |]+projection\\.short_name1\\.type[ |]+enum[ |]+")
@@ -1300,7 +1311,7 @@ public void testDatePartitionProjectionOnVarcharColumnWithIntervalExpression()
                 ")");
 
         assertThat(
-                hiveMinioDataLake.getHiveHadoop()
+                hiveMinioDataLake
                         .runOnHive("SHOW TBLPROPERTIES " + getHiveTestTableName(tableName)))
                 .containsPattern("[ |]+projection\\.enabled[ |]+true[ |]+")
                 .containsPattern("[ |]+projection\\.short_name1\\.type[ |]+enum[ |]+")
@@ -1404,7 +1415,7 @@ public void testInjectedPartitionProjectionOnVarcharColumn()
                 ")");
 
         assertThat(
-                hiveMinioDataLake.getHiveHadoop()
+                hiveMinioDataLake
                         .runOnHive("SHOW TBLPROPERTIES " + getHiveTestTableName(tableName)))
                 .containsPattern("[ |]+projection\\.enabled[ |]+true[ |]+")
                 .containsPattern("[ |]+projection\\.short_name1\\.type[ |]+enum[ |]+")
@@ -1643,7 +1654,7 @@ public void testPartitionProjectionIgnore()
         String fullyQualifiedTestTableName = getFullyQualifiedTestTableName(tableName);
 
         // Create corrupted configuration
-        hiveMinioDataLake.getHiveHadoop().runOnHive(
+        hiveMinioDataLake.runOnHive(
                 "CREATE TABLE " + hiveTestTableName + " ( " +
                         " name varchar(25) " +
                         ") PARTITIONED BY (" +
@@ -1662,7 +1673,7 @@ public void testPartitionProjectionIgnore()
                 "or '^\\s*NOW\\s*(([+-])\\s*([0-9]+)\\s*(DAY|HOUR|MINUTE|SECOND)S?\\s*)?$' that are sequential: Unparseable date: \"2001-01-01\"");
 
         // Append kill switch table property to ignore Partition Projection properties
-        hiveMinioDataLake.getHiveHadoop().runOnHive(
+        hiveMinioDataLake.runOnHive(
                 "ALTER TABLE " + hiveTestTableName + " SET TBLPROPERTIES ( 'trino.partition_projection.ignore'='TRUE' )");
         // Flush cache to get new definition
         computeActual("CALL system.flush_metadata_cache(schema_name => '" + HIVE_TEST_SCHEMA + "', table_name => '" + tableName + "')");
@@ -1966,9 +1977,9 @@ public void testUnsupportedDropSchemaCascadeWithNonHiveTable()
         String schemaName = "test_unsupported_drop_schema_cascade_" + randomNameSuffix();
         String icebergTableName = "test_dummy_iceberg_table" + randomNameSuffix();
 
-        hiveMinioDataLake.getHiveHadoop().runOnHive("CREATE DATABASE %2$s LOCATION 's3a://%1$s/%2$s'".formatted(bucketName, schemaName));
+        hiveMinioDataLake.runOnHive("CREATE DATABASE %2$s LOCATION 's3a://%1$s/%2$s'".formatted(bucketName, schemaName));
         try {
-            hiveMinioDataLake.getHiveHadoop().runOnHive("CREATE TABLE " + schemaName + "." + icebergTableName + " TBLPROPERTIES ('table_type'='iceberg') AS SELECT 1 a");
+            hiveMinioDataLake.runOnHive("CREATE TABLE " + schemaName + "." + icebergTableName + " TBLPROPERTIES ('table_type'='iceberg') AS SELECT 1 a");
 
             assertQueryFails("DROP SCHEMA " + schemaName + " CASCADE",
                     "\\QCannot query Iceberg table '%s.%s'".formatted(schemaName, icebergTableName));
@@ -1977,7 +1988,7 @@
             assertThat(hiveMinioDataLake.getMinioClient().listObjects(bucketName, schemaName).stream()).isNotEmpty();
         }
         finally {
-            hiveMinioDataLake.getHiveHadoop().runOnHive("DROP DATABASE IF EXISTS " + schemaName + " CASCADE");
+            hiveMinioDataLake.runOnHive("DROP DATABASE IF EXISTS " + schemaName + " CASCADE");
         }
     }
 
@@ -1986,12 +1997,12 @@ public void testUnsupportedCommentOnHiveView()
     {
         String viewName = HIVE_TEST_SCHEMA + ".test_unsupported_comment_on_hive_view_" + randomNameSuffix();
-        hiveMinioDataLake.getHiveHadoop().runOnHive("CREATE VIEW " + viewName + " AS SELECT 1 x");
+        hiveMinioDataLake.runOnHive("CREATE VIEW " + viewName + " AS SELECT 1 x");
         try {
             assertQueryFails("COMMENT ON COLUMN " + viewName + ".x IS NULL", "Hive views are not supported.*");
         }
         finally {
-            hiveMinioDataLake.getHiveHadoop().runOnHive("DROP VIEW " + viewName);
+            hiveMinioDataLake.runOnHive("DROP VIEW " + viewName);
         }
     }
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHive3OnDataLake.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHive3OnDataLake.java
new file mode 100644
index 00000000000..75efa817c54
--- /dev/null
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHive3OnDataLake.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.hive;
+
+import io.trino.plugin.hive.containers.Hive3MinioDataLake;
+import io.trino.plugin.hive.containers.HiveHadoop;
+import org.junit.jupiter.api.TestInstance;
+
+import static io.trino.testing.TestingNames.randomNameSuffix;
+import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;
+
+@TestInstance(PER_CLASS)
+class TestHive3OnDataLake
+        extends BaseTestHiveOnDataLake
+{
+    private static final String BUCKET_NAME = "test-hive-insert-overwrite-" + randomNameSuffix();
+
+    public TestHive3OnDataLake()
+    {
+        super(BUCKET_NAME, new Hive3MinioDataLake(BUCKET_NAME, HiveHadoop.HIVE3_IMAGE));
+    }
+}
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/S3HiveQueryRunner.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/S3HiveQueryRunner.java
index 17724fc599a..7149cc18911 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/S3HiveQueryRunner.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/S3HiveQueryRunner.java
@@ -20,6 +20,7 @@
 import io.airlift.units.Duration;
 import io.trino.plugin.hive.HiveQueryRunner;
 import io.trino.plugin.hive.containers.Hive3MinioDataLake;
+import io.trino.plugin.hive.containers.HiveMinioDataLake;
 import io.trino.plugin.hive.metastore.thrift.BridgingHiveMetastore;
 import io.trino.plugin.hive.metastore.thrift.TestingTokenAwareMetastoreClientFactory;
 import io.trino.plugin.hive.metastore.thrift.ThriftMetastoreConfig;
@@ -56,7 +57,7 @@ public static QueryRunner create(
                 .build();
     }
 
-    public static Builder builder(Hive3MinioDataLake hiveMinioDataLake)
+    public static Builder builder(HiveMinioDataLake hiveMinioDataLake)
     {
         return builder()
                 .setHiveMetastoreEndpoint(hiveMinioDataLake.getHiveMetastoreEndpoint())

From 867d3c577c15893ed7afaf8a2548728efab59595 Mon Sep 17 00:00:00 2001
From: Mayank Vadariya <48036907+mayankvadariya@users.noreply.github.com>
Date: Sun, 8 Dec 2024 20:20:17 -0500
Subject: [PATCH 6/8] Add S3 Hive4 query runner

---
 .../hive/containers/Hive4HiveServer.java      | 95 ++++++++++++++++++
 .../hive/containers/Hive4Metastore.java       | 96 +++++++++++++++++++
 .../hive/containers/Hive4MinioDataLake.java   | 92 ++++++++++++++++++
 .../plugin/hive/s3/S3HiveQueryRunner.java     | 21 ++++
 .../hive_minio_datalake/hive4-hive-site.xml   | 68 +++++++++++++
 5 files changed, 372 insertions(+)
 create mode 100644 plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive4HiveServer.java
 create mode 100644 plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive4Metastore.java
 create mode 100644 plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive4MinioDataLake.java
 create mode 100644 plugin/trino-hive/src/test/resources/hive_minio_datalake/hive4-hive-site.xml

diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive4HiveServer.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive4HiveServer.java
new file mode 100644
index 00000000000..96af084ca48
--- /dev/null
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive4HiveServer.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.hive.containers;
+
+import com.google.common.collect.ImmutableSet;
+import com.google.common.net.HostAndPort;
+import io.airlift.log.Logger;
+import io.trino.testing.containers.BaseTestContainer;
+import org.testcontainers.containers.Network;
+
+import java.net.URI;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+
+import static io.trino.plugin.hive.containers.Hive4Metastore.HIVE4_IMAGE;
+
+public class Hive4HiveServer
+        extends BaseTestContainer
+{
+    public static final int HIVE_SERVER_PORT = 10000;
+
+    private static final Logger log = Logger.get(Hive4HiveServer.class);
+    private static final String HOST_NAME = "hiveserver2";
+
+    public static Builder builder()
+    {
+        return new Builder();
+    }
+
+    public static class Builder
+            extends BaseTestContainer.Builder<Builder, Hive4HiveServer>
+    {
+        private Builder()
+        {
+            this.image = HIVE4_IMAGE;
+            this.hostName = HOST_NAME;
+            this.exposePorts = ImmutableSet.of(HIVE_SERVER_PORT);
+        }
+
+        @Override
+        public Hive4HiveServer build()
+        {
+            return new Hive4HiveServer(image, hostName, exposePorts, filesToMount, envVars, network, startupRetryLimit);
+        }
+    }
+
+    private Hive4HiveServer(
+            String image,
+            String hostName,
+            Set<Integer> ports,
+            Map<String, String> filesToMount,
+            Map<String, String> envVars,
+            Optional<Network> network,
+            int startupRetryLimit)
+    {
+        super(
+                image,
+                hostName,
+                ports,
+                filesToMount,
+                envVars,
+                network,
+                startupRetryLimit);
+    }
+
+    @Override
+    public void start()
+    {
+        super.start();
+        log.info("Hive container started with addresses for hive server: %s", getHiveServerEndpoint());
+    }
+
+    public String runOnHive(String query)
+    {
+        return executeInContainerFailOnError("beeline", "-u", "jdbc:hive2://localhost:%s/default".formatted(HIVE_SERVER_PORT), "-n", "hive", "-e", query);
+    }
+
+    public URI getHiveServerEndpoint()
+    {
+        HostAndPort address = getMappedHostAndPortForExposedPort(HIVE_SERVER_PORT);
+        return URI.create(address.getHost() + ":" + address.getPort());
+    }
+}
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive4Metastore.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive4Metastore.java
new file mode 100644
index 00000000000..c7d01ca3634
--- /dev/null
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive4Metastore.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.hive.containers;
+
+import com.google.common.collect.ImmutableSet;
+import com.google.common.net.HostAndPort;
+import io.airlift.log.Logger;
+import io.trino.testing.containers.BaseTestContainer;
+import org.testcontainers.containers.Network;
+
+import java.net.URI;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+
+import static io.trino.testing.TestingProperties.getDockerImagesVersion;
+
+public class Hive4Metastore
+        extends BaseTestContainer
+{
+    public static final String HIVE4_IMAGE = "ghcr.io/trinodb/testing/hive4.0-hive:" + getDockerImagesVersion();
+    public static final int HIVE_METASTORE_PORT = 9083;
+
+    private static final Logger log = Logger.get(Hive4Metastore.class);
+    private static final String HOST_NAME = "metastore";
+
+    public static Builder builder()
+    {
+        return new Builder();
+    }
+
+    public static class Builder
+            extends BaseTestContainer.Builder<Builder, Hive4Metastore>
+    {
+        private Builder()
+        {
+            this.image = HIVE4_IMAGE;
+            this.hostName = HOST_NAME;
+            this.exposePorts = ImmutableSet.of(HIVE_METASTORE_PORT);
+        }
+
+        @Override
+        public Hive4Metastore build()
+        {
+            return new Hive4Metastore(image, hostName, exposePorts, filesToMount, envVars, network, startupRetryLimit);
+        }
+    }
+
+    private Hive4Metastore(
+            String image,
+            String hostName,
+            Set<Integer> ports,
+            Map<String, String> filesToMount,
+            Map<String, String> envVars,
+            Optional<Network> network,
+            int startupRetryLimit)
+    {
+        super(
+                image,
+                hostName,
+                ports,
+                filesToMount,
+                envVars,
+                network,
+                startupRetryLimit);
+    }
+
+    @Override
+    public void start()
+    {
+        super.start();
+        log.info("Hive container started with addresses for metastore: %s", getHiveMetastoreEndpoint());
+    }
+
+    public URI getHiveMetastoreEndpoint()
+    {
+        HostAndPort address = getMappedHostAndPortForExposedPort(HIVE_METASTORE_PORT);
+        return URI.create("thrift://" + address.getHost() + ":" + address.getPort());
+    }
+
+    public URI getInternalHiveMetastoreEndpoint()
+    {
+        // Address of the metastore as seen by other containers on the shared Docker network
+        return URI.create("thrift://" + HOST_NAME + ":" + HIVE_METASTORE_PORT);
+    }
+}
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive4MinioDataLake.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive4MinioDataLake.java
new file mode 100644
index 00000000000..e58ea118889
--- /dev/null
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/containers/Hive4MinioDataLake.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.hive.containers;
+
+import com.google.common.collect.ImmutableMap;
+
+import java.net.URI;
+import java.util.Map;
+import java.util.Set;
+
+import static io.trino.plugin.hive.containers.Hive4HiveServer.HIVE_SERVER_PORT;
+import static io.trino.plugin.hive.containers.Hive4Metastore.HIVE4_IMAGE;
+import static io.trino.plugin.hive.containers.HiveMinioDataLake.State.STARTED;
+import static io.trino.testing.containers.Minio.MINIO_ACCESS_KEY;
+import static io.trino.testing.containers.Minio.MINIO_SECRET_KEY;
+import static io.trino.testing.containers.TestContainers.getPathFromClassPathResource;
+
+public class Hive4MinioDataLake
+        extends HiveMinioDataLake
+{
+    private final Hive4HiveServer hiveServer;
+    private final Hive4Metastore hiveMetastore;
+
+    public Hive4MinioDataLake(String bucketName)
+    {
+        super(bucketName);
+        String hiveImage = HIVE4_IMAGE;
+        Map<String, String> hiveFilesToMount = ImmutableMap.of("/opt/hive/conf/hive-site.xml", getPathFromClassPathResource("hive_minio_datalake/hive4-hive-site.xml"));
+        // A separate HMS and HiveServer2 (below) are created because the standalone HiveServer2 image doesn't expose an embedded HMS:
+        // https://github.com/apache/hive/blob/a1420ed816c315d98be7ebf05cdc3ba139a68643/packaging/src/docker/README.md?plain=1#L46
+        // Run a standalone metastore: https://github.com/apache/hive/blob/a1420ed816c315d98be7ebf05cdc3ba139a68643/packaging/src/docker/README.md?plain=1#L105
+        Hive4Metastore.Builder metastoreBuilder = Hive4Metastore.builder()
+                .withImage(hiveImage)
+                .withEnvVars(Map.of("SERVICE_NAME", "metastore"))
+                .withNetwork(network)
+                .withExposePorts(Set.of(Hive4Metastore.HIVE_METASTORE_PORT))
+                .withFilesToMount(hiveFilesToMount);
+        this.hiveMetastore = closer.register(metastoreBuilder.build());
+
+        // Run HiveServer2 connected to the remote (above) metastore: https://github.com/apache/hive/blob/a1420ed816c315d98be7ebf05cdc3ba139a68643/packaging/src/docker/README.md?plain=1#L139-L143
+        Hive4HiveServer.Builder hiveServerBuilder = Hive4HiveServer.builder()
+                .withImage(hiveImage)
+                .withEnvVars(Map.of(
+                        "SERVICE_NAME", "hiveserver2",
+                        "HIVE_SERVER2_THRIFT_PORT", String.valueOf(HIVE_SERVER_PORT),
+                        "SERVICE_OPTS", "-Xmx1G -Dhive.metastore.uris=%s".formatted(hiveMetastore.getInternalHiveMetastoreEndpoint()),
+                        "IS_RESUME", "true",
+                        "AWS_ACCESS_KEY_ID", MINIO_ACCESS_KEY,
+                        "AWS_SECRET_KEY", MINIO_SECRET_KEY))
+                .withNetwork(network)
+                .withExposePorts(Set.of(HIVE_SERVER_PORT))
+                .withFilesToMount(hiveFilesToMount);
+        this.hiveServer = closer.register(hiveServerBuilder.build());
+    }
+
+    @Override
+    public void start()
+    {
+        super.start();
+        hiveMetastore.start();
+        hiveServer.start();
+        state = STARTED;
+    }
+
+    @Override
+    public String runOnHive(String sql)
+    {
+        return hiveServer.runOnHive(sql);
+    }
+
+    @Override
+    public HiveHadoop getHiveHadoop()
+    {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public URI getHiveMetastoreEndpoint()
+    {
+        return hiveMetastore.getHiveMetastoreEndpoint();
+    }
+}
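Usage sketch (bucket and table names below are placeholders, not from this patch): because Hive4MinioDataLake inherits the AutoCloseable lifecycle of HiveMinioDataLake, it can be driven directly from a test or a main method:

    // Sketch only: start the MinIO + HMS + HiveServer2 stack and talk to Hive
    try (HiveMinioDataLake dataLake = new Hive4MinioDataLake("example-bucket")) {
        dataLake.start(); // MinIO first, then the metastore, then HiveServer2
        dataLake.runOnHive("CREATE TABLE example_table (x int)");
        URI metastoreUri = dataLake.getHiveMetastoreEndpoint(); // value for hive.metastore.uri
        List<String> objects = dataLake.listFiles("example_table");
    }

Note that getHiveHadoop() intentionally throws UnsupportedOperationException here: the Hadoop-bundled container exists only in the Hive 3 setup, so Hive 4 callers go through runOnHive() and getHiveMetastoreEndpoint() instead.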
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/S3HiveQueryRunner.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/S3HiveQueryRunner.java
index 7149cc18911..ea246e127f5 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/S3HiveQueryRunner.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/s3/S3HiveQueryRunner.java
@@ -20,6 +20,7 @@
 import io.airlift.units.Duration;
 import io.trino.plugin.hive.HiveQueryRunner;
 import io.trino.plugin.hive.containers.Hive3MinioDataLake;
+import io.trino.plugin.hive.containers.Hive4MinioDataLake;
 import io.trino.plugin.hive.containers.HiveMinioDataLake;
 import io.trino.plugin.hive.metastore.thrift.BridgingHiveMetastore;
 import io.trino.plugin.hive.metastore.thrift.TestingTokenAwareMetastoreClientFactory;
 import io.trino.plugin.hive.metastore.thrift.ThriftMetastoreConfig;
@@ -187,4 +188,24 @@ public static void main(String[] args)
         log.info("======== SERVER STARTED ========");
         log.info("\n====\n%s\n====", queryRunner.getCoordinator().getBaseUrl());
     }
+
+    public static class S3Hive4QueryRunner
+    {
+        public static void main(String[] args)
+                throws Exception
+        {
+            Hive4MinioDataLake hiveMinioDataLake = new Hive4MinioDataLake("tpch");
+            hiveMinioDataLake.start();
+
+            QueryRunner queryRunner = S3HiveQueryRunner.builder(hiveMinioDataLake)
+                    .addCoordinatorProperty("http-server.http.port", "8080")
+                    .setHiveProperties(ImmutableMap.of("hive.security", "allow-all"))
+                    .setSkipTimezoneSetup(true)
+                    .setInitialTables(TpchTable.getTables())
+                    .build();
+            Logger log = Logger.get(S3Hive4QueryRunner.class);
+            log.info("======== SERVER STARTED ========");
+            log.info("\n====\n%s\n====", queryRunner.getCoordinator().getBaseUrl());
+        }
+    }
 }
diff --git a/plugin/trino-hive/src/test/resources/hive_minio_datalake/hive4-hive-site.xml b/plugin/trino-hive/src/test/resources/hive_minio_datalake/hive4-hive-site.xml
new file mode 100644
index 00000000000..eec52b5f6ac
--- /dev/null
+++ b/plugin/trino-hive/src/test/resources/hive_minio_datalake/hive4-hive-site.xml
@@ -0,0 +1,68 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<configuration>
+    <property>
+        <name>hive.server2.enable.doAs</name>
+        <value>false</value>
+    </property>
+    <property>
+        <name>hive.tez.exec.inplace.progress</name>
+        <value>false</value>
+    </property>
+    <property>
+        <name>hive.exec.scratchdir</name>
+        <value>/opt/hive/scratch_dir</value>
+    </property>
+    <property>
+        <name>hive.user.install.directory</name>
+        <value>/opt/hive/install_dir</value>
+    </property>
+    <property>
+        <name>tez.runtime.optimize.local.fetch</name>
+        <value>true</value>
+    </property>
+    <property>
+        <name>hive.exec.submit.local.task.via.child</name>
+        <value>false</value>
+    </property>
+    <property>
+        <name>mapreduce.framework.name</name>
+        <value>local</value>
+    </property>
+    <property>
+        <name>hive.metastore.warehouse.dir</name>
+        <value>/opt/hive/data/warehouse</value>
+    </property>
+    <property>
+        <name>metastore.metastore.event.db.notification.api.auth</name>
+        <value>false</value>
+    </property>
+
+    <property>
+        <name>hive.users.in.admin.role</name>
+        <value>hive</value>
+    </property>
+
+    <property>
+        <name>fs.s3a.access.key</name>
+        <value>accesskey</value>
+    </property>
+    <property>
+        <name>fs.s3a.secret.key</name>
+        <value>secretkey</value>
+    </property>
+    <property>
+        <name>fs.s3a.endpoint</name>
+        <value>http://minio:4566</value>
+    </property>
+    <property>
+        <name>fs.s3a.path.style.access</name>
+        <value>true</value>
+    </property>
+    <property>
+        <name>fs.s3.impl</name>
+        <value>org.apache.hadoop.fs.s3a.S3AFileSystem</value>
+    </property>
+</configuration>

From 371199c6d984cce7168615c14e8d1c5699b8ef23 Mon Sep 17 00:00:00 2001
From: Mayank Vadariya <48036907+mayankvadariya@users.noreply.github.com>
Date: Sun, 8 Dec 2024 20:20:40 -0500
Subject: [PATCH 7/8] Add TestHive4OnDataLake test

---
 .../plugin/hive/TestHive4OnDataLake.java      | 66 +++++++++++++++++++
 1 file changed, 66 insertions(+)
 create mode 100644 plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHive4OnDataLake.java

diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHive4OnDataLake.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHive4OnDataLake.java
new file mode 100644
index 00000000000..379de81fcc9
--- /dev/null
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/TestHive4OnDataLake.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.hive;
+
+import io.trino.plugin.hive.containers.Hive4MinioDataLake;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.parallel.Execution;
+import org.junit.jupiter.api.parallel.ExecutionMode;
+
+import static io.trino.testing.TestingNames.randomNameSuffix;
+import static org.junit.jupiter.api.Assumptions.abort;
+
+@Execution(ExecutionMode.SAME_THREAD) // TODO Make custom hive4 image to support running queries concurrently
+class TestHive4OnDataLake
+        extends BaseTestHiveOnDataLake
+{
+    private static final String BUCKET_NAME = "test-hive-insert-overwrite-" + randomNameSuffix();
+
+    public TestHive4OnDataLake()
+    {
+        super(BUCKET_NAME, new Hive4MinioDataLake(BUCKET_NAME));
+    }
+
+    @Override
+    @Test
+    public void testSyncPartitionOnBucketRoot()
+    {
+        // https://github.com/trinodb/trino/issues/24453
+        abort("Fails with `location must not be root path`");
+    }
+
+    @Override
+    @Test
+    public void testUnpartitionedTableExternalLocationOnTopOfTheBucket()
+    {
+        // https://github.com/trinodb/trino/issues/24453
+        abort("Fails with `location must not be root path`");
+    }
+
+    @Override
+    @Test
+    public void testPartitionedTableExternalLocationOnTopOfTheBucket()
+    {
+        // https://github.com/trinodb/trino/issues/24453
+        abort("Fails with `location must not be root path`");
+    }
+
+    @Override
+    @Test
+    public void testInsertOverwritePartitionedAndBucketedAcidTable()
+    {
+        // https://github.com/trinodb/trino/issues/24454
+        abort("Fails with `Processor has no capabilities, cannot create an ACID table`");
+    }
+}

From 2dfb20cd2fc52093c84250031418d55e4c1c7d9f Mon Sep 17 00:00:00 2001
From: Mayank Vadariya <48036907+mayankvadariya@users.noreply.github.com>
Date: Sun, 8 Dec 2024 20:21:07 -0500
Subject: [PATCH 8/8] Add TestTrinoHive4CatalogWithHiveMetastore test

---
 ...estTrinoHive4CatalogWithHiveMetastore.java | 27 +++++++++++++++++++
 ...TestTrinoHiveCatalogWithHiveMetastore.java | 14 +++++++---
 2 files changed, 37 insertions(+), 4 deletions(-)
 create mode 100644 plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHive4CatalogWithHiveMetastore.java

diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHive4CatalogWithHiveMetastore.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHive4CatalogWithHiveMetastore.java
new file mode 100644
index 00000000000..7c4f0ae1e24
--- /dev/null
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHive4CatalogWithHiveMetastore.java
@@ -0,0 +1,27 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.iceberg.catalog.hms;
+
+import io.trino.plugin.hive.containers.Hive4MinioDataLake;
+import io.trino.plugin.hive.containers.HiveMinioDataLake;
+
+public class TestTrinoHive4CatalogWithHiveMetastore
+        extends TestTrinoHiveCatalogWithHiveMetastore
+{
+    @Override
+    HiveMinioDataLake hiveMinioDataLake()
+    {
+        return new Hive4MinioDataLake(bucketName);
+    }
+}
diff --git a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHiveCatalogWithHiveMetastore.java b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHiveCatalogWithHiveMetastore.java
index 3085162c477..01f4b8a4c3e 100644
--- a/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHiveCatalogWithHiveMetastore.java
+++ b/plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHiveCatalogWithHiveMetastore.java
@@ -34,6 +34,7 @@
 import io.trino.plugin.base.util.AutoCloseableCloser;
 import io.trino.plugin.hive.TrinoViewHiveMetastore;
 import io.trino.plugin.hive.containers.Hive3MinioDataLake;
+import io.trino.plugin.hive.containers.HiveMinioDataLake;
 import io.trino.plugin.hive.metastore.cache.CachingHiveMetastore;
 import io.trino.plugin.hive.metastore.thrift.BridgingHiveMetastore;
 import io.trino.plugin.hive.metastore.thrift.ThriftMetastore;
@@ -87,17 +88,22 @@ public class TestTrinoHiveCatalogWithHiveMetastore
 {
     private static final Logger LOG = Logger.get(TestTrinoHiveCatalogWithHiveMetastore.class);
 
-    private AutoCloseableCloser closer = AutoCloseableCloser.create();
+    private final AutoCloseableCloser closer = AutoCloseableCloser.create();
     // Use MinIO for storage, since HDFS is hard to get working in a unit test
-    private Hive3MinioDataLake dataLake;
+    private HiveMinioDataLake dataLake;
     private TrinoFileSystem fileSystem;
-    private String bucketName;
+    protected String bucketName;
+
+    HiveMinioDataLake hiveMinioDataLake()
+    {
+        return new Hive3MinioDataLake(bucketName, HIVE3_IMAGE);
+    }
 
     @BeforeAll
     public void setUp()
     {
         bucketName = "test-hive-catalog-with-hms-" + randomNameSuffix();
-        dataLake = closer.register(new Hive3MinioDataLake(bucketName, HIVE3_IMAGE));
+        dataLake = closer.register(hiveMinioDataLake());
         dataLake.start();
     }
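
Taken together, the series leaves two extension points: query-runner tests pass any HiveMinioDataLake into S3HiveQueryRunner.builder(...), and catalog tests override hiveMinioDataLake(). A hypothetical further test could therefore be wired up as in the sketch below (class name and properties are illustrative, not part of the series):

    // Sketch only: a Hive 4 backed query-runner test reusing the generalized builder
    class ExampleHive4QueryTest
            extends AbstractTestQueryFramework
    {
        @Override
        protected QueryRunner createQueryRunner()
                throws Exception
        {
            HiveMinioDataLake dataLake = closeAfterClass(new Hive4MinioDataLake("example-" + randomNameSuffix()));
            dataLake.start();
            return S3HiveQueryRunner.builder(dataLake)
                    .setHiveProperties(ImmutableMap.of("hive.security", "allow-all"))
                    .build();
        }
    }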