diff --git a/core/src/main/scala/org/apache/spark/sql/delta/commands/RestoreTableCommand.scala b/core/src/main/scala/org/apache/spark/sql/delta/commands/RestoreTableCommand.scala
index 1377ee48955..382ef14b211 100644
--- a/core/src/main/scala/org/apache/spark/sql/delta/commands/RestoreTableCommand.scala
+++ b/core/src/main/scala/org/apache/spark/sql/delta/commands/RestoreTableCommand.scala
@@ -26,6 +26,7 @@ import org.apache.spark.sql.delta.actions.{AddFile, RemoveFile}
 import org.apache.spark.sql.delta.catalog.DeltaTableV2
 import org.apache.spark.sql.delta.sources.DeltaSQLConf
 import org.apache.spark.sql.delta.util.DeltaFileOperations.absolutePath
+import org.apache.hadoop.fs.Path
 
 import org.apache.spark.sql.{Dataset, Row, SparkSession}
 import org.apache.spark.sql.catalyst.TableIdentifier
@@ -244,7 +245,7 @@ case class RestoreTableCommand(
 
     val spark: SparkSession = files.sparkSession
 
-    val path = deltaLog.dataPath
+    val pathString = deltaLog.dataPath.toString
 
     val hadoopConf = spark.sparkContext.broadcast(
       new SerializableConfiguration(deltaLog.newDeltaHadoopConf()))
@@ -252,9 +253,9 @@ case class RestoreTableCommand(
 
     val missedFiles = files
       .mapPartitions { files =>
+        val path = new Path(pathString)
         val fs = path.getFileSystem(hadoopConf.value.value)
-        val pathStr = path.toUri.getPath
-        files.filterNot(f => fs.exists(absolutePath(pathStr, f.path)))
+        files.filterNot(f => fs.exists(absolutePath(pathString, f.path)))
      }
       .map(_.path)
       .head(100)
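
For context: the patch avoids capturing a Hadoop `Path` (not `Serializable` on older Hadoop lines) in the `mapPartitions` closure, which would fail task serialization on the driver. Instead it ships the path as a plain `String` plus a broadcast `SerializableConfiguration`, and rebuilds the `Path` and `FileSystem` on the executors. Below is a minimal, self-contained sketch of that pattern, not the Delta Lake code itself: the object name, the `file:///tmp/my-table` root, and the file names are hypothetical stand-ins for `deltaLog.dataPath` and the `RemoveFile` dataset, `new Path(root, p)` stands in for `DeltaFileOperations.absolutePath`, and it assumes Spark 3.x, where `org.apache.spark.util.SerializableConfiguration` is publicly accessible.

    import org.apache.hadoop.fs.Path
    import org.apache.spark.sql.SparkSession
    import org.apache.spark.util.SerializableConfiguration

    object PathInClosureSketch {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .master("local[*]")
          .appName("path-in-closure")
          .getOrCreate()
        import spark.implicits._

        // Hypothetical stand-ins for deltaLog.dataPath and the RemoveFile dataset.
        val pathString = "file:///tmp/my-table" // in the patch: deltaLog.dataPath.toString
        val relativePaths = Seq("part-00000.parquet", "part-00001.parquet").toDS()

        // Hadoop's Configuration is not serializable either; wrap it so it can be
        // broadcast, as the patched command does.
        val hadoopConf = spark.sparkContext.broadcast(
          new SerializableConfiguration(spark.sparkContext.hadoopConfiguration))

        val missing = relativePaths
          .mapPartitions { paths =>
            // Rebuild the Path and FileSystem once per partition, on the executor,
            // so only the String and the broadcast conf cross the driver-executor
            // serialization boundary.
            val root = new Path(pathString)
            val fs = root.getFileSystem(hadoopConf.value.value)
            // Simplified stand-in for DeltaFileOperations.absolutePath.
            paths.filterNot(p => fs.exists(new Path(root, p)))
          }
          .collect()

        println(s"missing files: ${missing.mkString(", ")}")
        spark.stop()
      }
    }

Note the design choice mirrored from the patch: the `Path` and `FileSystem` are constructed once per partition inside `mapPartitions` rather than per record, so the closure captures only serializable values (a `String` and a broadcast variable).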