Added UT for ConfigUtils.toMap and handled more edge cases
voonhou.su committed Jul 4, 2022
1 parent 25ea8db commit 2b03a2d
Showing 6 changed files with 58 additions and 11 deletions.
@@ -97,7 +97,6 @@ public static HiveSyncConfig buildSyncConfig(Configuration conf) {
hiveSyncConfig.decodePartition = conf.getBoolean(FlinkOptions.URL_ENCODE_PARTITIONING);
hiveSyncConfig.skipROSuffix = conf.getBoolean(FlinkOptions.HIVE_SYNC_SKIP_RO_SUFFIX);
hiveSyncConfig.assumeDatePartitioning = conf.getBoolean(FlinkOptions.HIVE_SYNC_ASSUME_DATE_PARTITION);
- hiveSyncConfig.withOperationField = conf.getBoolean(FlinkOptions.CHANGELOG_ENABLED);
return hiveSyncConfig;
}
}
@@ -49,7 +49,7 @@ public abstract class AbstractHiveSyncHoodieClient extends AbstractSyncHoodieCli
protected final PartitionValueExtractor partitionValueExtractor;

public AbstractHiveSyncHoodieClient(HiveSyncConfig syncConfig, Configuration hadoopConf, FileSystem fs) {
- super(syncConfig.basePath, syncConfig.assumeDatePartitioning, syncConfig.useFileListingFromMetadata, syncConfig.withOperationField, fs);
+ super(syncConfig.basePath, syncConfig.assumeDatePartitioning, syncConfig.useFileListingFromMetadata, fs);
this.syncConfig = syncConfig;
this.hadoopConf = hadoopConf;
this.partitionValueExtractor = ReflectionUtils.loadClass(syncConfig.partitionValueExtractorClass);
Expand Down
@@ -92,9 +92,6 @@ public class HiveSyncConfig extends HoodieSyncConfig {
@Parameter(names = {"--spark-schema-length-threshold"}, description = "The maximum length allowed in a single cell when storing additional schema information in Hive's metastore.")
public int sparkSchemaLengthThreshold;

@Parameter(names = {"--with-operation-field"}, description = "Whether to include the '_hoodie_operation' field in the metadata fields")
public Boolean withOperationField = false;

@Parameter(names = {"--sync-comment"}, description = "synchronize table comments to hive")
public boolean syncComment = false;

@@ -273,7 +270,6 @@ public String toString() {
+ ", createManagedTable=" + createManagedTable
+ ", syncAsSparkDataSourceTable=" + syncAsSparkDataSourceTable
+ ", sparkSchemaLengthThreshold=" + sparkSchemaLengthThreshold
+ ", withOperationField=" + withOperationField
+ ", isConditionalSync=" + isConditionalSync
+ ", sparkVersion=" + sparkVersion
+ ", syncComment=" + syncComment
@@ -56,22 +56,20 @@ public abstract class AbstractSyncHoodieClient implements AutoCloseable {
private final String basePath;
private final boolean assumeDatePartitioning;
private final boolean useFileListingFromMetadata;
- private final boolean withOperationField;

@Deprecated
public AbstractSyncHoodieClient(String basePath, boolean assumeDatePartitioning, boolean useFileListingFromMetadata,
- boolean verifyMetadataFileListing, boolean withOperationField, FileSystem fs) {
- this(basePath, assumeDatePartitioning, useFileListingFromMetadata, withOperationField, fs);
+ boolean verifyMetadataFileListing, FileSystem fs) {
+ this(basePath, assumeDatePartitioning, useFileListingFromMetadata, fs);
}

public AbstractSyncHoodieClient(String basePath, boolean assumeDatePartitioning, boolean useFileListingFromMetadata,
- boolean withOperationField, FileSystem fs) {
+ FileSystem fs) {
this.metaClient = HoodieTableMetaClient.builder().setConf(fs.getConf()).setBasePath(basePath).setLoadActiveTimelineOnLoad(true).build();
this.tableType = metaClient.getTableType();
this.basePath = basePath;
this.assumeDatePartitioning = assumeDatePartitioning;
this.useFileListingFromMetadata = useFileListingFromMetadata;
- this.withOperationField = withOperationField;
this.fs = fs;
}

@@ -42,6 +42,10 @@ public static Map<String, String> toMap(String keyValueConfig) {
String[] keyvalues = keyValueConfig.split("\n");
Map<String, String> tableProperties = new HashMap<>();
for (String keyValue : keyvalues) {
+ // Handle multiple new lines and lines that contain only spaces after splitting
+ if (keyValue.trim().isEmpty()) {
+ continue;
+ }
String[] keyValueArray = keyValue.split("=");
if (keyValueArray.length == 1 || keyValueArray.length == 2) {
String key = keyValueArray[0].trim();
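For context, here is a minimal usage sketch (not part of this commit) of how ConfigUtils.toMap behaves once blank and whitespace-only lines are skipped. It assumes ConfigUtils lives in org.apache.hudi.sync.common.util, as the new test below suggests; the class name and property keys are made up for illustration.

import java.util.Map;

import org.apache.hudi.sync.common.util.ConfigUtils;

public class ToMapUsageSketch {
  public static void main(String[] args) {
    // Blank lines and whitespace-only lines between properties are skipped
    // instead of tripping up the parser; keys and values are trimmed.
    String props = "example.key.one = v1\n \n\nexample.key.two=v2";
    Map<String, String> parsed = ConfigUtils.toMap(props);
    // Prints both entries, e.g. {example.key.one=v1, example.key.two=v2}
    System.out.println(parsed);
  }
}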
@@ -0,0 +1,50 @@
package org.apache.hudi.sync.common.util;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

import java.util.HashMap;
import java.util.Map;
import org.junit.jupiter.api.Test;

public class TestConfigUtils {

@Test
public void testToMapSucceeds() {
Map<String, String> expectedMap = new HashMap<>();
expectedMap.put("k.1.1.2", "v1");
expectedMap.put("k.2.1.2", "v2");
expectedMap.put("k.3.1.2", "v3");

// Test base case
String srcKv = "k.1.1.2=v1\nk.2.1.2=v2\nk.3.1.2=v3";
Map<String, String> outMap = ConfigUtils.toMap(srcKv);
assertEquals(expectedMap, outMap);

// Test ends with new line
srcKv = "k.1.1.2=v1\nk.2.1.2=v2\nk.3.1.2=v3\n";
outMap = ConfigUtils.toMap(srcKv);
assertEquals(expectedMap, outMap);

// Test delimited by multiple new lines
srcKv = "k.1.1.2=v1\nk.2.1.2=v2\n\nk.3.1.2=v3";
outMap = ConfigUtils.toMap(srcKv);
assertEquals(expectedMap, outMap);

// Test delimited by multiple new lines with spaces in between
srcKv = "k.1.1.2=v1\n \nk.2.1.2=v2\n\nk.3.1.2=v3";
outMap = ConfigUtils.toMap(srcKv);
assertEquals(expectedMap, outMap);

// Test that keys and values with surrounding spaces are trimmed properly
srcKv = " k.1.1.2 = v1\n k.2.1.2 = v2 \nk.3.1.2 = v3";
outMap = ConfigUtils.toMap(srcKv);
assertEquals(expectedMap, outMap);
}

@Test
public void testToMapThrowError() {
String srcKv = "k.1.1.2=v1=v1.1\nk.2.1.2=v2\nk.3.1.2=v3";
assertThrows(IllegalArgumentException.class, () -> ConfigUtils.toMap(srcKv));
}
}
