
Commit

Add TestTrinoHive4CatalogWithHiveMetastore test
mayankvadariya committed Dec 13, 2024
1 parent 0139c5e commit c38b2b6
Showing 2 changed files with 37 additions and 4 deletions.
TestTrinoHive4CatalogWithHiveMetastore.java (new file)
@@ -0,0 +1,27 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.iceberg.catalog.hms;

import io.trino.plugin.hive.containers.Hive4MinioDataLake;
import io.trino.plugin.hive.containers.HiveMinioDataLake;

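// Runs the same tests as TestTrinoHiveCatalogWithHiveMetastore, but against a Hive 4
// metastore container (Hive4MinioDataLake) instead of the default Hive 3 one.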
public class TestTrinoHive4CatalogWithHiveMetastore
        extends TestTrinoHiveCatalogWithHiveMetastore
{
    @Override
    HiveMinioDataLake hiveMinioDataLake()
    {
        return new Hive4MinioDataLake(bucketName);
    }
}
TestTrinoHiveCatalogWithHiveMetastore.java
@@ -34,6 +34,7 @@
 import io.trino.plugin.base.util.AutoCloseableCloser;
 import io.trino.plugin.hive.TrinoViewHiveMetastore;
 import io.trino.plugin.hive.containers.Hive3MinioDataLake;
+import io.trino.plugin.hive.containers.HiveMinioDataLake;
 import io.trino.plugin.hive.metastore.cache.CachingHiveMetastore;
 import io.trino.plugin.hive.metastore.thrift.BridgingHiveMetastore;
 import io.trino.plugin.hive.metastore.thrift.ThriftMetastore;
@@ -87,17 +88,22 @@ public class TestTrinoHiveCatalogWithHiveMetastore
 {
     private static final Logger LOG = Logger.get(TestTrinoHiveCatalogWithHiveMetastore.class);
 
-    private AutoCloseableCloser closer = AutoCloseableCloser.create();
+    private final AutoCloseableCloser closer = AutoCloseableCloser.create();
     // Use MinIO for storage, since HDFS is hard to get working in a unit test
-    private Hive3MinioDataLake dataLake;
+    private HiveMinioDataLake dataLake;
     private TrinoFileSystem fileSystem;
-    private String bucketName;
+    protected String bucketName;
 
+    HiveMinioDataLake hiveMinioDataLake()
+    {
+        return new Hive3MinioDataLake(bucketName, HIVE3_IMAGE);
+    }
+
     @BeforeAll
     public void setUp()
     {
         bucketName = "test-hive-catalog-with-hms-" + randomNameSuffix();
-        dataLake = closer.register(new Hive3MinioDataLake(bucketName, HIVE3_IMAGE));
+        dataLake = closer.register(hiveMinioDataLake());
         dataLake.start();
     }
