Remove Hive integration tests for CDH 5
electrum committed Aug 12, 2022
1 parent 687bef3 commit 7031ef5
Showing 5 changed files with 4 additions and 18 deletions.
1 change: 0 additions & 1 deletion .github/workflows/ci.yml
@@ -176,7 +176,6 @@ jobs:
 config:
 - config-empty
 - config-hdp3
-# TODO: config-cdh5
 # TODO: config-apache-hive3
 timeout-minutes: 60
 steps:
3 changes: 1 addition & 2 deletions docs/src/main/sphinx/connector/hive.rst
@@ -40,8 +40,7 @@ The Hive connector requires a Hive metastore service (HMS), or a compatible
 implementation of the Hive metastore, such as
 `AWS Glue Data Catalog <https://aws.amazon.com/glue/>`_.
 
-Apache Hadoop 2.x and 3.x are supported, along with derivative distributions,
-including Cloudera CDH 5 and Hortonworks Data Platform (HDP).
+Apache Hadoop HDFS 2.x and 3.x are supported.
 
 Many distributed storage systems including HDFS,
 :doc:`Amazon S3 <hive-s3>` or S3-compatible systems,
@@ -17,8 +17,6 @@
 import org.testng.annotations.Test;
 
 import java.io.IOException;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 
 import static org.assertj.core.api.Assertions.assertThat;
 
@@ -31,21 +29,12 @@ public void testCreatedByIsParsable()
         String createdBy = ParquetWriter.formatCreatedBy("test-version");
         // createdBy must start with "parquet-mr" to make Apache Hive perform timezone conversion on INT96 timestamps correctly
         // when hive.parquet.timestamp.skip.conversion is set to true.
-        // Apache Hive 3.2 and above, and CDH 5 enable hive.parquet.timestamp.skip.conversion by default
+        // Apache Hive 3.2 and above enable hive.parquet.timestamp.skip.conversion by default
         assertThat(createdBy).startsWith("parquet-mr");
         VersionParser.ParsedVersion version = VersionParser.parse(createdBy);
         assertThat(version).isNotNull();
         assertThat(version.application).isEqualTo("parquet-mr-trino");
         assertThat(version.version).isEqualTo("test-version");
         assertThat(version.appBuildHash).isEqualTo("n/a");
-
-        // Ensure that createdBy field is parsable in CDH 5 to avoid the exception "parquet.io.ParquetDecodingException: Cannot read data due to PARQUET-246: to read safely, set parquet.split.files to false";
-        // the pattern is taken from https://github.com/cloudera/parquet-mr/blob/cdh5-1.5.0_5.15.1/parquet-common/src/main/java/parquet/VersionParser.java#L34
-        Pattern pattern = Pattern.compile("(.+) version ((.*) )?\\(build ?(.*)\\)");
-        Matcher matcher = pattern.matcher(createdBy);
-        assertThat(matcher.matches()).isTrue();
-        assertThat(matcher.group(1)).isEqualTo("parquet-mr-trino");
-        assertThat(matcher.group(3)).isEqualTo("test-version");
-        assertThat(matcher.group(4)).isEqualTo("n/a");
     }
 }
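For reference, the retained assertions above pin down the createdBy shape that parquet-mr's VersionParser accepts. The sketch below is illustrative only: it assumes the org.apache.parquet.VersionParser API from parquet-common, and the literal createdBy string is hand-written to match the asserted values rather than produced by Trino's formatCreatedBy.

    import org.apache.parquet.VersionParser;
    import org.apache.parquet.VersionParser.ParsedVersion;
    import org.apache.parquet.VersionParser.VersionParseException;

    public class CreatedByShapeExample
    {
        public static void main(String[] args)
                throws VersionParseException
        {
            // Hand-written createdBy value in the shape the test asserts:
            // "<application> version <version> (build <hash>)", starting with "parquet-mr"
            // so that Hive applies INT96 timestamp conversion correctly.
            String createdBy = "parquet-mr-trino version test-version (build n/a)";

            ParsedVersion parsed = VersionParser.parse(createdBy);
            System.out.println(parsed.application);  // parquet-mr-trino
            System.out.println(parsed.version);      // test-version
            System.out.println(parsed.appBuildHash); // n/a
        }
    }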
1 change: 0 additions & 1 deletion plugin/trino-hive-hadoop2/conf/hive-tests-config-cdh5.sh

This file was deleted.

@@ -358,7 +358,7 @@ public static Optional<CompressionCodec> getCompressionCodec(TextInputFormat inp
     private static Class<? extends InputFormat<?, ?>> getInputFormatClass(JobConf conf, String inputFormatName)
             throws ClassNotFoundException
     {
-        // CDH uses different names for Parquet
+        // legacy names for Parquet
         if ("parquet.hive.DeprecatedParquetInputFormat".equals(inputFormatName) ||
                 "parquet.hive.MapredParquetInputFormat".equals(inputFormatName)) {
             return MapredParquetInputFormat.class;
@@ -462,7 +462,7 @@ public static Deserializer getDeserializer(Configuration configuration, Properti
 
     private static Class<? extends Deserializer> getDeserializerClass(String name)
     {
-        // CDH uses different names for Parquet
+        // legacy name for Parquet
         if ("parquet.hive.serde.ParquetHiveSerDe".equals(name)) {
             return ParquetHiveSerDe.class;
         }
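The reworded comments above cover the legacy "parquet.hive.*" class names that older table metadata can still carry. Below is a standalone sketch of that name normalization, illustrative only: the fully qualified targets are the standard Hive classes referenced in the diff, and HiveUtil itself returns the classes directly rather than consulting a static map like this.

    import java.util.Map;

    public class LegacyParquetNames
    {
        // Legacy "parquet.hive.*" names mapped to the Hive classes that are loaded instead
        // (illustrative mapping only).
        private static final Map<String, String> LEGACY_TO_CURRENT = Map.of(
                "parquet.hive.DeprecatedParquetInputFormat", "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat",
                "parquet.hive.MapredParquetInputFormat", "org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat",
                "parquet.hive.serde.ParquetHiveSerDe", "org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe");

        public static String normalize(String className)
        {
            return LEGACY_TO_CURRENT.getOrDefault(className, className);
        }

        public static void main(String[] args)
        {
            // prints org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe
            System.out.println(normalize("parquet.hive.serde.ParquetHiveSerDe"));
        }
    }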
