
Commit

Address review comments
codope committed Mar 30, 2022
1 parent 63fdb81 commit 0912c87
Showing 4 changed files with 5 additions and 6 deletions.
@@ -69,5 +69,5 @@ public interface HoodieTableMetadataWriter extends Serializable, AutoCloseable {
* @param partitions - list of {@link MetadataPartitionType} to drop
* @throws IOException
*/
- void dropPartitions(String instantTime, List<MetadataPartitionType> partitions);
+ void deletePartitions(String instantTime, List<MetadataPartitionType> partitions);
}
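
For orientation, a minimal, hypothetical caller of the renamed interface method, mirroring the invocation in the test further down this diff; the import paths and the helper class name are assumptions and not part of the commit.

import java.util.Arrays;
import java.util.List;

import org.apache.hudi.metadata.HoodieTableMetadataWriter;
import org.apache.hudi.metadata.MetadataPartitionType;

// Hypothetical helper, not part of this commit: deletes the column_stats
// partition of the metadata table at the given instant via the renamed API.
class DeleteMetadataPartitionExample {
  static void deleteColumnStats(HoodieTableMetadataWriter writer, String instantTime) {
    List<MetadataPartitionType> toDelete = Arrays.asList(MetadataPartitionType.COLUMN_STATS);
    writer.deletePartitions(instantTime, toDelete);
  }
}
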
@@ -158,7 +158,7 @@ protected void commit(String instantTime, Map<MetadataPartitionType, HoodieData<
}

@Override
- public void dropPartitions(String instantTime, List<MetadataPartitionType> partitions) {
+ public void deletePartitions(String instantTime, List<MetadataPartitionType> partitions) {
throw new HoodieNotSupportedException("Dropping metadata index not supported for Flink metadata table yet.");
}
}
@@ -183,9 +183,9 @@ protected void commit(String instantTime, Map<MetadataPartitionType, HoodieData<
}

@Override
- public void dropPartitions(String instantTime, List<MetadataPartitionType> partitions) {
+ public void deletePartitions(String instantTime, List<MetadataPartitionType> partitions) {
List<String> partitionsToDrop = partitions.stream().map(MetadataPartitionType::getPartitionPath).collect(Collectors.toList());
- LOG.warn("Deleting Metadata Table partitions: " + partitionsToDrop);
+ LOG.info("Deleting Metadata Table partitions: " + partitionsToDrop);

try (SparkRDDWriteClient writeClient = new SparkRDDWriteClient(engineContext, metadataWriteConfig, true)) {
String actionType = CommitUtils.getCommitActionType(WriteOperationType.DELETE_PARTITION, HoodieTableType.MERGE_ON_READ);
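
The rest of this hunk is not shown here. As a hedged, illustrative sketch only (not the commit's actual continuation), issuing a delete-partition commit through the Spark write client generally follows the pattern below; startCommitWithTime and deletePartitions are pre-existing write-client methods, but their exact use inside this method is an assumption.

// Illustrative sketch, assuming the partitionsToDrop, instantTime, engineContext and
// metadataWriteConfig variables from the hunk above; not the verbatim file contents.
try (SparkRDDWriteClient writeClient = new SparkRDDWriteClient(engineContext, metadataWriteConfig, true)) {
  String actionType = CommitUtils.getCommitActionType(WriteOperationType.DELETE_PARTITION, HoodieTableType.MERGE_ON_READ);
  // open a new commit on the metadata table with the delete_partition action type
  writeClient.startCommitWithTime(instantTime, actionType);
  // delete the listed metadata table partitions under that instant
  writeClient.deletePartitions(partitionsToDrop, instantTime);
}
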
@@ -482,12 +482,11 @@ public void testMetadataTableDeletePartition(HoodieTableType tableType) throws I
records = dataGen.generateInserts(newCommitTime, 10);
writeStatuses = client.upsert(jsc.parallelize(records, 1), newCommitTime).collect();
assertNoWriteErrors(writeStatuses);
- validateMetadata(client);

// metadata writer to delete column_stats partition
HoodieBackedTableMetadataWriter metadataWriter = metadataWriter(client);
assertNotNull(metadataWriter, "MetadataWriter should have been initialized");
- metadataWriter.dropPartitions("0000003", Arrays.asList(MetadataPartitionType.COLUMN_STATS));
+ metadataWriter.deletePartitions("0000003", Arrays.asList(MetadataPartitionType.COLUMN_STATS));

HoodieTableMetaClient metadataMetaClient = HoodieTableMetaClient.builder().setConf(hadoopConf).setBasePath(metadataTableBasePath).build();
List<String> metadataTablePartitions = FSUtils.getAllPartitionPaths(engineContext, metadataMetaClient.getBasePath(), false, false);
