Skip to content

Commit

Permalink
[HUDI-6407] Rename USE_LOG_RECORD_READER_SCAN_V2 in HoodieRealtimeConfig (#9011)
Browse files Browse the repository at this point in the history
  • Loading branch information
yihua authored Jun 19, 2023
1 parent a0340a1 commit 94adcf2
Show file tree
Hide file tree
Showing 3 changed files with 12 additions and 8 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,6 @@

package org.apache.hudi.testutils;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
import org.apache.hudi.common.util.CollectionUtils;
Expand All @@ -31,6 +26,12 @@
import org.apache.hudi.hadoop.config.HoodieRealtimeConfig;
import org.apache.hudi.hadoop.utils.HoodieRealtimeRecordReaderUtils;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.mapred.JobConf;

import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
Expand Down Expand Up @@ -88,7 +89,7 @@ public static void assertDataInMORTable(HoodieWriteConfig config, String instant
.collect(Collectors.toList());

jobConf.set(String.format(HOODIE_CONSUME_COMMIT, config.getTableName()), instant1);
jobConf.set(HoodieRealtimeConfig.USE_LOG_RECORD_READER_SCAN_V2, "true");
jobConf.set(HoodieRealtimeConfig.ENABLE_OPTIMIZED_LOG_BLOCKS_SCAN, "true");
List<GenericRecord> records = HoodieMergeOnReadTestUtils.getRecordsUsingInputFormat(
hadoopConf, fullPartitionPaths, config.getBasePath(), jobConf, true);
Map<String, GenericRecord> prevRecordsMap = records.stream()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@

package org.apache.hudi.hadoop.config;

import org.apache.hudi.common.config.HoodieMetadataConfig;

/**
* Class to hold props related to Hoodie RealtimeInputFormat and RealtimeRecordReader.
*/
Expand All @@ -39,5 +41,6 @@ public final class HoodieRealtimeConfig {
public static final String SPILLABLE_MAP_BASE_PATH_PROP = "hoodie.memory.spillable.map.path";
// Default file path prefix for spillable file
public static final String DEFAULT_SPILLABLE_MAP_BASE_PATH = "/tmp/";
public static final String USE_LOG_RECORD_READER_SCAN_V2 = "hoodie.log.record.reader.use.scanV2";
public static final String ENABLE_OPTIMIZED_LOG_BLOCKS_SCAN =
"hoodie" + HoodieMetadataConfig.OPTIMIZED_LOG_BLOCKS_SCAN;
}
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ private HoodieMergedLogRecordScanner getMergedLogRecordScanner() throws IOExcept
.withDiskMapType(jobConf.getEnum(HoodieCommonConfig.SPILLABLE_DISK_MAP_TYPE.key(), HoodieCommonConfig.SPILLABLE_DISK_MAP_TYPE.defaultValue()))
.withBitCaskDiskMapCompressionEnabled(jobConf.getBoolean(HoodieCommonConfig.DISK_MAP_BITCASK_COMPRESSION_ENABLED.key(),
HoodieCommonConfig.DISK_MAP_BITCASK_COMPRESSION_ENABLED.defaultValue()))
.withOptimizedLogBlocksScan(jobConf.getBoolean(HoodieRealtimeConfig.USE_LOG_RECORD_READER_SCAN_V2, false))
.withOptimizedLogBlocksScan(jobConf.getBoolean(HoodieRealtimeConfig.ENABLE_OPTIMIZED_LOG_BLOCKS_SCAN, false))
.withInternalSchema(schemaEvolutionContext.internalSchemaOption.orElse(InternalSchema.getEmptyInternalSchema()))
.build();
}
Expand Down

0 comments on commit 94adcf2

Please sign in to comment.