
Commit

fixing compilation issue
nsivabalan committed Apr 19, 2023
1 parent 08ec5be commit 4064bbe
Showing 2 changed files with 23 additions and 5 deletions.
@@ -239,7 +239,7 @@ object HoodieSparkSqlTestBase
       .setBasePath(tablePath)
       .build()
 
-    val cleanInstant = metaClient.getActiveTimeline.getCleanerTimeline.filterCompletedInstants().lastInstant().get()
+    val cleanInstant = metaClient.reloadActiveTimeline().getCleanerTimeline.filterCompletedInstants().lastInstant().get()
     TimelineMetadataUtils.deserializeHoodieCleanMetadata(metaClient
       .getActiveTimeline.getInstantDetails(cleanInstant).get)
   }
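For context, here is a sketch of how the changed helper reads after this hunk. The method name, parameters, and the reload-then-deserialize calls come from the hunk and from the call sites in the second file; the wrapping object name, the builder's .setConf line, and the SparkSession parameter type are assumptions, since those lines are truncated above the hunk.

import org.apache.hudi.avro.model.HoodieCleanMetadata
import org.apache.hudi.common.table.HoodieTableMetaClient
import org.apache.hudi.common.table.timeline.TimelineMetadataUtils
import org.apache.spark.sql.SparkSession

// Sketch only; in the real code this lives in object HoodieSparkSqlTestBase.
object CleanMetadataSketch {

  def getLastCleanMetadata(spark: SparkSession, tablePath: String): HoodieCleanMetadata = {
    val metaClient = HoodieTableMetaClient.builder()
      .setConf(spark.sparkContext.hadoopConfiguration) // assumed; truncated above the hunk
      .setBasePath(tablePath)
      .build()

    // Reload the active timeline from storage so the clean that just completed
    // is visible, then deserialize the metadata of its last completed instant.
    val cleanInstant = metaClient.reloadActiveTimeline().getCleanerTimeline
      .filterCompletedInstants().lastInstant().get()
    TimelineMetadataUtils.deserializeHoodieCleanMetadata(
      metaClient.getActiveTimeline.getInstantDetails(cleanInstant).get)
  }
}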
@@ -18,7 +18,7 @@
 package org.apache.spark.sql.hudi
 
 import org.apache.hudi.DataSourceWriteOptions._
-import org.apache.hudi.avro.model.HoodieCleanMetadata
+import org.apache.hudi.avro.model.{HoodieCleanMetadata, HoodieCleanPartitionMetadata}
 import org.apache.hudi.{HoodieCLIUtils, HoodieSparkUtils}
 import org.apache.hudi.common.model.HoodieCommitMetadata
 import org.apache.hudi.common.table.HoodieTableMetaClient
@@ -139,7 +139,13 @@ class TestAlterTableDropPartition extends HoodieSparkSqlTestBase
         .collect()
 
       val cleanMetadata: HoodieCleanMetadata = getLastCleanMetadata(spark, tablePath)
-      assertTrue(cleanMetadata.totalFilesDeleted > 0)
+      val cleanPartitionMeta = new java.util.ArrayList(cleanMetadata.getPartitionMetadata.values()).toArray()
+      var totalDeletedFiles = 0
+      cleanPartitionMeta.foreach(entry =>
+      {
+        totalDeletedFiles += entry.asInstanceOf[HoodieCleanPartitionMetadata].getSuccessDeleteFiles.size()
+      })
+      assertTrue(totalDeletedFiles > 0)
 
       val partitionPath = if (urlencode) {
         PartitionPathEncodeUtils.escapePathName("2021/10/01")
@@ -319,7 +325,13 @@ class TestAlterTableDropPartition extends HoodieSparkSqlTestBase
         .collect()
 
       val cleanMetadata: HoodieCleanMetadata = getLastCleanMetadata(spark, tablePath)
-      assertTrue(cleanMetadata.totalFilesDeleted > 0)
+      val cleanPartitionMeta = new java.util.ArrayList(cleanMetadata.getPartitionMetadata.values()).toArray()
+      var totalDeletedFiles = 0
+      cleanPartitionMeta.foreach(entry =>
+      {
+        totalDeletedFiles += entry.asInstanceOf[HoodieCleanPartitionMetadata].getSuccessDeleteFiles.size()
+      })
+      assertTrue(totalDeletedFiles > 0)
 
       checkAnswer(s"select id, name, ts, year, month, day from $tableName")(
         Seq(2, "l4", "v1", "2021", "10", "02")
@@ -389,7 +401,13 @@ class TestAlterTableDropPartition extends HoodieSparkSqlTestBase
         .collect()
 
       val cleanMetadata: HoodieCleanMetadata = getLastCleanMetadata(spark, tablePath)
-      assertTrue(cleanMetadata.totalFilesDeleted > 0)
+      val cleanPartitionMeta = new java.util.ArrayList(cleanMetadata.getPartitionMetadata.values()).toArray()
+      var totalDeletedFiles = 0
+      cleanPartitionMeta.foreach(entry =>
+      {
+        totalDeletedFiles += entry.asInstanceOf[HoodieCleanPartitionMetadata].getSuccessDeleteFiles.size()
+      })
+      assertTrue(totalDeletedFiles > 0)
 
       // insert data
       spark.sql(s"""insert into $tableName values (2, "l4", "v1", "2021", "10", "02")""")
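The three hunks above repeat the same per-partition summation. As a sketch only (not part of this commit; the object and helper names are hypothetical), the count could be expressed once with JavaConverters and reused by each assertion:

import scala.collection.JavaConverters._

import org.apache.hudi.avro.model.HoodieCleanMetadata

object CleanAssertions {
  // Hypothetical helper (not in this commit): total files the clean reported
  // as successfully deleted, summed across all partitions in the metadata.
  def totalSuccessfullyDeletedFiles(cleanMetadata: HoodieCleanMetadata): Int =
    cleanMetadata.getPartitionMetadata.values().asScala
      .map(_.getSuccessDeleteFiles.size())
      .sum
}

// Possible usage in place of the repeated blocks above:
//   assertTrue(CleanAssertions.totalSuccessfullyDeletedFiles(cleanMetadata) > 0)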
