Skip to content

Commit

Permalink
Add TestTrinoHive4CatalogWithHiveMetastore test
Browse files Browse the repository at this point in the history
  • Loading branch information
mayankvadariya committed Dec 11, 2024
1 parent 1e3c71c commit c235742
Show file tree
Hide file tree
Showing 2 changed files with 69 additions and 4 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.iceberg.catalog.hms;

import io.trino.plugin.base.util.AutoCloseableCloser;
import io.trino.plugin.hive.containers.Hive4MinioDataLake;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;

import java.net.URI;

public class TestTrinoHive4CatalogWithHiveMetastore
extends TestTrinoHiveCatalogWithHiveMetastore
{
private final AutoCloseableCloser closer = AutoCloseableCloser.create();
private Hive4MinioDataLake dataLake;

@BeforeAll
public void setUp()

Check failure on line 30 in plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHive4CatalogWithHiveMetastore.java

View workflow job for this annotation

GitHub Actions / error-prone-checks

setUp overrides method in TestTrinoHiveCatalogWithHiveMetastore; expected @OverRide

Check failure on line 30 in plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHive4CatalogWithHiveMetastore.java

View workflow job for this annotation

GitHub Actions / error-prone-checks

setUp overrides method in TestTrinoHiveCatalogWithHiveMetastore; expected @OverRide
{
dataLake = closer.register(new Hive4MinioDataLake(bucketName));
dataLake.start();

Check failure on line 33 in plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHive4CatalogWithHiveMetastore.java

View workflow job for this annotation

GitHub Actions / test (plugin/trino-iceberg)

TestTrinoHive4CatalogWithHiveMetastore.

Can't get Docker image: RemoteDockerImage(imageName=ghcr.io/trinodb/testing/hive4.0-hive:latest, imagePullPolicy=DefaultPullPolicy(), imageNameSubstitutor=org.testcontainers.utility.ImageNameSubstitutor$LogWrappedImageNameSubstitutor@3dc01b1d)
}

@AfterAll
public void tearDown()

Check failure on line 37 in plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHive4CatalogWithHiveMetastore.java

View workflow job for this annotation

GitHub Actions / error-prone-checks

tearDown overrides method in TestTrinoHiveCatalogWithHiveMetastore; expected @OverRide

Check failure on line 37 in plugin/trino-iceberg/src/test/java/io/trino/plugin/iceberg/catalog/hms/TestTrinoHive4CatalogWithHiveMetastore.java

View workflow job for this annotation

GitHub Actions / error-prone-checks

tearDown overrides method in TestTrinoHiveCatalogWithHiveMetastore; expected @OverRide
throws Exception
{
dataLake = null;
closer.close();
}

@Override
protected URI hiveMetastoreEndpoint()
{
return dataLake.getHiveMetastore().getHiveMetastoreEndpoint();
}

@Override
protected String minioAddress()
{
return dataLake.getMinio().getMinioAddress();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@
import org.junit.jupiter.api.parallel.Execution;

import java.io.IOException;
import java.net.URI;
import java.util.List;
import java.util.Map;
import java.util.Optional;
Expand Down Expand Up @@ -86,17 +87,16 @@ public class TestTrinoHiveCatalogWithHiveMetastore
extends BaseTrinoCatalogTest
{
private static final Logger LOG = Logger.get(TestTrinoHiveCatalogWithHiveMetastore.class);
protected static final String bucketName = "test-hive-catalog-with-hms-" + randomNameSuffix();

private AutoCloseableCloser closer = AutoCloseableCloser.create();
// Use MinIO for storage, since HDFS is hard to get working in a unit test
private HiveMinioDataLake dataLake;
private TrinoFileSystem fileSystem;
private String bucketName;

@BeforeAll
public void setUp()
{
bucketName = "test-hive-catalog-with-hms-" + randomNameSuffix();
dataLake = closer.register(new HiveMinioDataLake(bucketName, HIVE3_IMAGE));
dataLake.start();
}
Expand All @@ -117,7 +117,7 @@ protected TrinoCatalog createTrinoCatalog(boolean useUniqueTableLocations)
new HdfsConfigurationInitializer(
new HdfsConfig(),
Set.of(new TrinoS3ConfigurationInitializer(new HiveS3Config()
.setS3Endpoint(dataLake.getMinio().getMinioAddress())
.setS3Endpoint(minioAddress())
.setS3SslEnabled(false)
.setS3AwsAccessKey(MINIO_ACCESS_KEY)
.setS3AwsSecretKey(MINIO_SECRET_KEY)
Expand All @@ -130,7 +130,7 @@ protected TrinoCatalog createTrinoCatalog(boolean useUniqueTableLocations)
.thriftMetastoreConfig(new ThriftMetastoreConfig()
// Read timed out sometimes happens with the default timeout
.setReadTimeout(new Duration(1, MINUTES)))
.metastoreClient(dataLake.getHiveHadoop().getHiveMetastoreEndpoint())
.metastoreClient(hiveMetastoreEndpoint())
.build(closer::register);
CachingHiveMetastore metastore = createPerTransactionCache(new BridgingHiveMetastore(thriftMetastore), 1000);
fileSystem = fileSystemFactory.create(SESSION);
Expand Down Expand Up @@ -229,6 +229,16 @@ public void testCreateMaterializedView()
}
}

// Thrift endpoint of the Hive metastore used by createTrinoCatalog().
// Extension point: the Hive 4 test subclass overrides this to point at its
// own Hive4MinioDataLake container.
protected URI hiveMetastoreEndpoint()
{
return dataLake.getHiveHadoop().getHiveMetastoreEndpoint();
}

// S3 endpoint of the MinIO container backing the catalog's storage.
// Extension point: the Hive 4 test subclass overrides this to return the
// address of its own MinIO instance.
protected String minioAddress()
{
return dataLake.getMinio().getMinioAddress();
}

@Override
protected Map<String, Object> defaultNamespaceProperties(String namespaceName)
{
Expand Down

0 comments on commit c235742

Please sign in to comment.