
Commit

Fixing a little more
Alexey Kudinkin committed Apr 4, 2022
1 parent 270cab1 commit a4c391f
Showing 4 changed files with 9 additions and 5 deletions.
@@ -172,11 +172,17 @@ public void testWriteReadHFileWithMetaFields(boolean populateMetaFields, boolean
 for (int i = 0; i < 2; i++) {
   int randomRowstoFetch = 5 + RANDOM.nextInt(10);
   Set<String> rowsToFetch = getRandomKeys(randomRowstoFetch, keys);
+
   List<String> rowsList = new ArrayList<>(rowsToFetch);
   Collections.sort(rowsList);
+
+  List<GenericRecord> expectedRecords = rowsList.stream().map(recordMap::get).collect(Collectors.toList());
+
   hoodieHFileReader = (HoodieHFileReader<GenericRecord>) createReader(conf);
   List<GenericRecord> result = HoodieHFileReader.readRecords(hoodieHFileReader, rowsList);
-  assertEquals(recordMap.values(), result);
+
+  assertEquals(expectedRecords, result);
+
   result.forEach(entry -> {
     if (populateMetaFields && testAvroWithMeta) {
       assertNotNull(entry.get(HoodieRecord.RECORD_KEY_METADATA_FIELD));
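The assertion change above addresses an ordering mismatch: records come back from the reader in the order of the sorted key list, while recordMap.values() iterates in whatever order the map happens to use. Below is a minimal, self-contained sketch of the same idea, using plain String values and a hypothetical recordMap rather than the test's actual types:

```java
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class SortedExpectationSketch {
  public static void main(String[] args) {
    // Hypothetical stand-in for the test's recordMap (key -> record).
    Map<String, String> recordMap = new HashMap<>();
    recordMap.put("key2", "b");
    recordMap.put("key1", "a");
    recordMap.put("key3", "c");

    // Build the expectation from the sorted key list, mirroring the order in
    // which keyed lookups return records, rather than from values() iteration.
    List<String> sortedKeys = new ArrayList<>(recordMap.keySet());
    Collections.sort(sortedKeys);
    List<String> expected =
        sortedKeys.stream().map(recordMap::get).collect(Collectors.toList());

    System.out.println(expected); // [a, b, c]
  }
}
```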
@@ -106,7 +106,6 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.util.Time;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
@@ -21,9 +21,9 @@
 import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.avro.generic.IndexedRecord;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hudi.avro.HoodieAvroUtils;
 import org.apache.hudi.avro.model.HoodieMetadataRecord;
 import org.apache.hudi.common.config.HoodieMetadataConfig;
@@ -51,8 +51,6 @@
 import org.apache.hudi.metadata.MetadataPartitionType;
 import org.apache.hudi.table.HoodieSparkTable;
 import org.apache.hudi.table.HoodieTable;
-
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
 import org.apache.parquet.avro.AvroSchemaConverter;
@@ -344,6 +344,7 @@ public static <R extends IndexedRecord> List<R> readRecords(HoodieHFileReader<R>
 public static <R extends IndexedRecord> List<R> readRecords(HoodieHFileReader<R> reader,
                                                              List<String> keys,
                                                              Schema schema) throws IOException {
+  Collections.sort(keys);
   return toStream(reader.getRecordsByKeysIterator(keys, schema))
       .collect(Collectors.toList());
 }
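For context, the Collections.sort(keys) added above puts the lookup keys in ascending order before they reach getRecordsByKeysIterator, presumably because an HFile is laid out by key and point lookups scan it forward. Below is a small standalone sketch of that pre-sorting step using only java.util (no Hudi types); note that Collections.sort reorders the caller's list in place, which is also true of the helper above:

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class SortKeysSketch {
  public static void main(String[] args) {
    // Keys gathered in arbitrary order, as a caller might pass them in.
    List<String> keys = new ArrayList<>(Arrays.asList("key3", "key1", "key2"));

    // Sort ascending before the keyed lookup, as the helper now does internally.
    // This mutates the list in place, so the caller sees the reordered keys too.
    Collections.sort(keys);

    System.out.println(keys); // [key1, key2, key3]
  }
}
```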
