Skip to content

Commit

Permalink
rebase code, address comments
Browse files Browse the repository at this point in the history
  • Loading branch information
xiarixiaoyao committed Jan 11, 2023
1 parent d716114 commit 8ed791f
Show file tree
Hide file tree
Showing 4 changed files with 4 additions and 28 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -253,11 +253,7 @@ private boolean evaluatePartitionPredicate(
if (partitionPathValue.equals("default")) {
return true;
}

if (columnPredicate.intersect(domain).isNone()) {
return false;
}
return true;
return !columnPredicate.intersect(domain).isNone();
}
else {
// Should not happen
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -275,7 +275,7 @@ private static HudiSplitWeightProvider createSplitWeightProvider(ConnectorSessio
return HudiSplitWeightProvider.uniformStandardWeightProvider();
}

private static HoodieTableQueryType getQueryType(String inputFormat)
public static HoodieTableQueryType getQueryType(String inputFormat)
{
// TODO support incremental query
switch (inputFormat) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -187,11 +187,8 @@ private static DistributedQueryRunner createHudiQueryRunner(Map<String, String>
.build();

// setup file metastore

Path catalogDirectory = queryRunner.getCoordinator().getDataDirectory().resolve("catalog");

metastore = createFileHiveMetastore(catalogDirectory.toString());

// create database
Database database = Database.builder()
.setDatabaseName(HUDI_SCHEMA)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,6 @@
import net.jpountz.xxhash.XXHashFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hudi.common.engine.HoodieLocalEngineContext;
import org.apache.hudi.common.model.HoodieTableQueryType;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.testng.annotations.Test;

Expand All @@ -40,7 +39,7 @@
import static org.testng.Assert.assertEquals;

/**
* Integration tests for reading Delta tables.
* Integration tests for reading Hudi tables.
*/
public class TestHudiSkipping
extends AbstractHudiDistributedQueryTestBase
Expand Down Expand Up @@ -87,7 +86,7 @@ public void testPartitionPruneAndFileSkipping()
HudiSessionProperties.getHoodieFilesystemViewSpillableDir(connectorSession),
engineContext,
metaClient,
getQueryType(table.get().getStorage().getStorageFormat().getInputFormat()),
HudiSplitManager.getQueryType(table.get().getStorage().getStorageFormat().getInputFormat()),
Optional.empty());
// case1: no filter
assertEquals(hudiFileSkippingManager.listQueryFiles(TupleDomain.all()).entrySet().stream().map(entry -> entry.getValue().size()).reduce(0, Integer::sum), 4);
Expand Down Expand Up @@ -116,22 +115,6 @@ public void testSkippingResult()
});
}

/**
 * Infers the Hudi query type from the Hive input-format class name of the table storage.
 *
 * @param hudiInputFormat fully qualified input-format class name from table storage metadata
 * @return {@code SNAPSHOT} for MOR real-time formats, {@code READ_OPTIMIZED} for COW / MOR
 *         read-optimized formats
 * @throws IllegalArgumentException if the input format is not a recognized Hudi format
 */
private HoodieTableQueryType getQueryType(String hudiInputFormat)
{
    boolean isRealtimeFormat = "org.apache.hudi.hadoop.realtime.HoodieParquetRealtimeInputFormat".equals(hudiInputFormat)
            || "com.uber.hoodie.hadoop.realtime.HoodieRealtimeInputFormat".equals(hudiInputFormat);
    if (isRealtimeFormat) {
        // mor rt table
        return HoodieTableQueryType.SNAPSHOT;
    }
    boolean isReadOptimizedFormat = "org.apache.hudi.hadoop.HoodieParquetInputFormat".equals(hudiInputFormat)
            || "com.uber.hoodie.hadoop.HoodieInputFormat".equals(hudiInputFormat);
    if (isReadOptimizedFormat) {
        // cow table/ mor ro table
        return HoodieTableQueryType.READ_OPTIMIZED;
    }
    throw new IllegalArgumentException(String.format("failed to infer query type for current inputFormat: %s", hudiInputFormat));
}

// TODO: remove this function once we bump Hudi to 0.13.0.
// The old hudi-presto-bundle does not include lz4, which is used by data-skipping.
private void shouldRemoved()
Expand Down

0 comments on commit 8ed791f

Please sign in to comment.